You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ao...@apache.org on 2017/04/06 09:33:22 UTC

[01/41] ambari git commit: AMBARI-20677. Centering workflows for zoom breaks when multiple tabs exist (M Madhan Mohan Reddy via padmapriyanitt)

Repository: ambari
Updated Branches:
  refs/heads/branch-3.0-perf 8bef3b489 -> 8de3961b6


AMBARI-20677. Centering workflows for zoom breaks when multiple tabs exist (M Madhan Mohan Reddy via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fb4637b3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fb4637b3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fb4637b3

Branch: refs/heads/branch-3.0-perf
Commit: fb4637b33df2c511b1d5be17ef63fdeb887e7ee9
Parents: 8c58e67
Author: padmapriyanitt <pa...@gmail.com>
Authored: Thu Apr 6 12:16:39 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../resources/ui/app/components/designer-workspace.js  |  1 +
 .../main/resources/ui/app/components/flow-designer.js  |  3 +++
 .../resources/ui/app/domain/cytoscape-flow-renderer.js | 13 ++++++++++---
 3 files changed, 14 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fb4637b3/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
index 980904f..de1eb0e 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
@@ -305,6 +305,7 @@ export default Ember.Component.extend({
       if(tab.type === 'wf' && tab.context){
         CommonUtils.setTestContext(tab.context);
         tab.context.resize();
+        tab.context.centerGraph();
       }else if(tab.type === 'dashboard'){
         this.sendAction('showDashboard');
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb4637b3/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index fa7c861..cccf6d0 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -248,6 +248,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   resize(){
     this.flowRenderer.resize();
   },
+  centerGraph(){
+    this.flowRenderer.setGraphCenter();
+  },
   cleanupFlowRenderer:function(){
     this.set('renderNodeTransitions',false);
     this.flowRenderer.cleanup();

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb4637b3/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
index af84f86..03f0ae1 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
@@ -67,11 +67,18 @@ var CytoscapeRenderer= Ember.Object.extend({
   _setCyOverflow() {
     Ember.set(this.get("cyOverflow"), "overflown", this.cy.elements().renderedBoundingBox().y2 > this.cy.height());
   },
-  _setGraphCenter() {
+  setGraphCenter() {
+    if (this.cy && !this.centered){
+      Ember.run.later(this, function() {
+        this._setGraphCenterOnStartNode();
+      },50);
+    }
+  },
+  _setGraphCenterOnStartNode() {
     var startDataNode = this.get("dataNodes").filterBy("data.type", "start");
     if (startDataNode[0] && startDataNode[0].data.id) {
       var startNode = this.cy.$("#" + startDataNode[0].data.id);
-      this.cy.center();
+      this.cy.center(startNode);
       this.cy.pan({y:50});
     }
   },
@@ -400,7 +407,7 @@ var CytoscapeRenderer= Ember.Object.extend({
     this.cy.endBatch();
     this.cy.layout(this.get("layoutConfigs"));
     this._setCyOverflow();
-    this._setGraphCenter();
+    this._setGraphCenterOnStartNode();
   },
 
   initRenderer(callback, settings){


[17/41] ambari git commit: AMBARI-20663. During finalize of HDP upgrade, Ambari should provide a message regarding HBase snapshots (dlysnichenko)

Posted by ao...@apache.org.
AMBARI-20663. During finalize of HDP upgrade, Ambari should provide a message regarding HBase snapshots (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1b663da9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1b663da9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1b663da9

Branch: refs/heads/branch-3.0-perf
Commit: 1b663da93662ecda43e116a201a8527d35bb4e5b
Parents: 50a41a8
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Mon Apr 3 21:36:20 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml    | 10 ++++++++++
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml    | 10 ++++++++++
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml    | 10 ++++++++++
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml    | 10 ++++++++++
 .../resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml     | 10 ++++++++++
 .../resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml     | 10 ++++++++++
 .../resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml     | 10 ++++++++++
 .../resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml     | 10 ++++++++++
 .../stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml    | 10 ++++++++++
 .../stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml    | 10 ++++++++++
 .../stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml    | 10 ++++++++++
 .../resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml     | 10 ++++++++++
 .../resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml     | 10 ++++++++++
 .../resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml     | 10 ++++++++++
 .../stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml    | 10 ++++++++++
 .../stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml    | 10 ++++++++++
 .../resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml     | 10 ++++++++++
 .../resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml     | 10 ++++++++++
 .../stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml    | 10 ++++++++++
 .../resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml     | 10 ++++++++++
 .../stacks/PERF/1.0/upgrades/nonrolling-upgrade-2.0.xml   | 10 ++++++++++
 .../resources/stacks/PERF/1.0/upgrades/upgrade-2.0.xml    | 10 ++++++++++
 22 files changed, 220 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
index 28ca939..3d8041e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
@@ -661,6 +661,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
index 9aff1a9..fa06291 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
@@ -806,6 +806,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
index 73a4f5e..8b8b9a6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
@@ -958,6 +958,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index 3a7df7d..2bf6e23 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -1056,6 +1056,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
index 89b5126..1340b22 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
@@ -410,6 +410,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index 4e897a0..40afc4f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -454,6 +454,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
index ca612b8..e0882d8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
@@ -540,6 +540,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index 1fbf81e..0f4efdc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -542,6 +542,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
index 3c0104b..13f44fb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
@@ -652,6 +652,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
index bb6506e..ea261b9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
@@ -919,6 +919,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index c6c3a0d..de6b8ef 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -1014,6 +1014,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
index 10f07f9..d5e9a5b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
@@ -406,6 +406,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
index 4654ac8..350395c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
@@ -525,6 +525,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index ec7d8b8..9ac3d52 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -531,6 +531,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
index 95ca3a2..0f05089 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
@@ -741,6 +741,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index 05ecb2c..a6bc4fc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -1047,6 +1047,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
index 37f92fd..04a06e8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
@@ -450,6 +450,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index 0dab6f4..879fe0f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -461,6 +461,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
index 4a641ec..68c58c0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
@@ -743,6 +743,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
index 0587741..01f11e4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
@@ -477,6 +477,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Delete HBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any HBase snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following HBase shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/nonrolling-upgrade-2.0.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/nonrolling-upgrade-2.0.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/nonrolling-upgrade-2.0.xml
index 2626bc3..7078a6e 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/nonrolling-upgrade-2.0.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/nonrolling-upgrade-2.0.xml
@@ -286,6 +286,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="FAKEHBASE" component="FAKEHBASE_MASTER" title="Delete FAKEHBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any FAKEHBASE snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following FAKEHBASE shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="FAKEHDFS" component="FAKENAMENODE" title="Execute FAKEHDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b663da9/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/upgrade-2.0.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/upgrade-2.0.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/upgrade-2.0.xml
index 99746d6..9c9cebb 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/upgrade-2.0.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/upgrades/upgrade-2.0.xml
@@ -172,6 +172,16 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="FAKEHBASE" component="FAKEHBASE_MASTER" title="Delete FAKEHBase snapshots">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>You can now remove any FAKEHBASE snapshots which were created at the beginning of the upgrade. To see existing snapshots, use the following FAKEHBASE shell command:</message>
+          <message>hbase> list_snapshots</message>
+          <message>Once you have found an existing snapshot which you would like to remove, you can use the following command:</message>
+          <message>hbase> delete_snapshot 'snapshotName'</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="FAKEHDFS" component="FAKENAMENODE" title="Execute FAKEHDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>


[25/41] ambari git commit: AMBARI-20664. Remove hardcoding of /usr/hdp by AMBARI-20543 needed for IBM Power (aonishuk)

Posted by ao...@apache.org.
AMBARI-20664. Remove hardcoding of /usr/hdp by AMBARI-20543 needed for IBM Power (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/27105de9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/27105de9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/27105de9

Branch: refs/heads/branch-3.0-perf
Commit: 27105de959cac3c3b0e4b806dd341a7c4417bc41
Parents: 7d4df5b
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Apr 4 11:41:31 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml    | 2 +-
 .../common-services/TEZ/0.4.0.2.1/configuration/tez-site.xml       | 2 +-
 .../common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py  | 2 ++
 .../YARN/2.1.0.2.0/configuration-mapred/mapred-site.xml            | 2 +-
 .../common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py | 1 +
 .../resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py  | 1 +
 .../stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml      | 2 +-
 .../stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml      | 2 +-
 .../stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml      | 2 +-
 .../stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml      | 2 +-
 10 files changed, 11 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
index 4b4008d..bb671cc 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
@@ -368,7 +368,7 @@ fi
 export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 
 #Mostly required for hadoop 2.0
-export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64:/usr/hdp/current/hadoop-client/lib/native/Linux-{{architecture}}-64
+export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:{{hadoop_lib_home}}/native/Linux-{{architecture}}-64
 
 {% if is_datanode_max_locked_memory_set %}
 # Fix temporary bug, when ulimit from conf files is not picked up, without full relogin. 

http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/configuration/tez-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/configuration/tez-site.xml b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/configuration/tez-site.xml
index d883ec2..c6ddfa5 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/configuration/tez-site.xml
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/configuration/tez-site.xml
@@ -174,7 +174,7 @@
   </property>
   <property>
     <name>tez.am.env</name>
-    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64:/usr/hdp/current/hadoop-client/lib/native/Linux-{{architecture}}-64</value>
+    <value>LD_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-{{architecture}}-64</value>
     <description>
         Additional execution environment entries for tez. This is not an additive property. You must preserve the original value if
         you want to have access to native libraries.

http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 5a028bd..4d63685 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -47,6 +47,8 @@ stack_version_formatted = format_stack_version(stack_version_unformatted)
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")
+
 # default hadoop parameters
 hadoop_home = '/usr'
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")

http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-site.xml b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-site.xml
index e2ef1b1..a7d8cd6 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-site.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-site.xml
@@ -438,7 +438,7 @@
   </property>
   <property>
     <name>mapreduce.admin.user.env</name>
-    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64:/usr/hdp/current/hadoop-client/lib/native/Linux-{{architecture}}-64</value>
+    <value>LD_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-{{architecture}}-64</value>
     <description>
       Additional execution environment entries for map and reduce task processes.
       This is not an additive property. You must preserve the original value if

http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index a4de8fa..88be29c 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -90,6 +90,7 @@ hostname = config['hostname']
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_bin = stack_select.get_hadoop_dir("sbin")
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_yarn_home = '/usr/lib/hadoop-yarn'
 hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"

http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index cab9102..4052d1d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -106,6 +106,7 @@ mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
 hadoop_home = stack_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec", force_latest_on_upgrade=True)
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hadoop_secure_dn_user = hdfs_user

http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index bf7d750..6574bc5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -169,7 +169,7 @@ fi
 export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 
 # Mostly required for hadoop 2.0
-export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64:/usr/hdp/current/hadoop-client/lib/native/Linux-{{architecture}}-64
+export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:{{hadoop_lib_home}}/native/Linux-{{architecture}}-64
 
 export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
index 99d38e2..1bfd2fe 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
@@ -143,7 +143,7 @@ export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}
 export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 
 # Mostly required for hadoop 2.0
-export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64:/usr/hdp/current/hadoop-client/lib/native/Linux-{{architecture}}-64
+export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:{{hadoop_lib_home}}/native/Linux-{{architecture}}-64
 
 export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
index 99d38e2..1bfd2fe 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
@@ -143,7 +143,7 @@ export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}
 export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 
 # Mostly required for hadoop 2.0
-export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64:/usr/hdp/current/hadoop-client/lib/native/Linux-{{architecture}}-64
+export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:{{hadoop_lib_home}}/native/Linux-{{architecture}}-64
 
 export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/27105de9/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
index ee7478b..04b9304 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
@@ -143,7 +143,7 @@
       export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 
       # Mostly required for hadoop 2.0
-      export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64:/usr/hdp/current/hadoop-client/lib/native/Linux-{{architecture}}-64
+      export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:{{hadoop_lib_home}}/native/Linux-{{architecture}}-64
 
       export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
 


[11/41] ambari git commit: AMBARI-20632. With multi-process StatusCommandsExecutor, Status commands are taking too long to report back (echekanskiy)

Posted by ao...@apache.org.
AMBARI-20632. With multi-process StatusCommandsExecutor, Status commands are taking too long to report back (echekanskiy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ccf9edbc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ccf9edbc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ccf9edbc

Branch: refs/heads/branch-3.0-perf
Commit: ccf9edbc14d9d38f1924ab1defa8ca8b6f73f3f9
Parents: 4f5ac09
Author: Eugene Chekanskiy <ec...@hortonworks.com>
Authored: Mon Apr 3 16:46:27 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../ambari_agent/StatusCommandsExecutor.py      | 209 +++++++++----------
 .../src/main/python/ambari_agent/main.py        |   5 +-
 2 files changed, 98 insertions(+), 116 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ccf9edbc/ambari-agent/src/main/python/ambari_agent/StatusCommandsExecutor.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/StatusCommandsExecutor.py b/ambari-agent/src/main/python/ambari_agent/StatusCommandsExecutor.py
index 04a3e85..142e7ca 100644
--- a/ambari-agent/src/main/python/ambari_agent/StatusCommandsExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/StatusCommandsExecutor.py
@@ -49,7 +49,7 @@ class SingleProcessStatusCommandsExecutor(StatusCommandsExecutor):
     self.config = config
     self.actionQueue = actionQueue
     self.statusCommandQueue = Queue.Queue()
-    self.need_relaunch = False
+    self.need_relaunch = (False, None) #  tuple (bool, str|None) with flag to relaunch and reason of relaunch
 
   def put_commands(self, commands):
     with self.statusCommandQueue.mutex:
@@ -88,12 +88,13 @@ class MultiProcessStatusCommandsExecutor(StatusCommandsExecutor):
     self.config = config
     self.actionQueue = actionQueue
 
-    self._can_relaunch_lock = threading.RLock()
-    self._can_relaunch = True
+    self.can_relaunch = True
 
     # used to prevent queues from been used during creation of new one to prevent threads messing up with combination of
     # old and new queues
     self.usage_lock = threading.RLock()
+    # protects against simultaneous killing/creating from different threads.
+    self.kill_lock = threading.RLock()
 
     self.status_command_timeout = int(self.config.get('agent', 'status_command_timeout', 5))
     self.customServiceOrchestrator = self.actionQueue.customServiceOrchestrator
@@ -107,42 +108,32 @@ class MultiProcessStatusCommandsExecutor(StatusCommandsExecutor):
     self.mp_result_logs = multiprocessing.Queue()
     self.mp_task_queue = multiprocessing.Queue()
 
-  @property
-  def can_relaunch(self):
-    with self._can_relaunch_lock:
-      return self._can_relaunch
-
-  @can_relaunch.setter
-  def can_relaunch(self, value):
-    with self._can_relaunch_lock:
-      self._can_relaunch = value
-
-  def _log_message(self, level, message, exception=None):
-    """
-    Put log message to logging queue. Must be used only for logging from child process(in _worker_process_target).
-
-    :param level:
-    :param message:
-    :param exception:
-    :return:
+  def _drain_queue(self, target_queue, max_time=5, max_empty_count=15, read_break=.001):
     """
-    result_message = "StatusCommandExecutor reporting at {0}: ".format(time.time()) + message
-    self.mp_result_logs.put((level, result_message, exception))
-
-  def _get_log_messages(self):
-    """
-    Returns list of (level, message, exception) log messages.
-
-    :return: list of (level, message, exception)
+    Read everything that available in queue. Using not reliable multiprocessing.Queue methods(qsize, empty), so contains
+    extremely dumb protection against blocking too much at this method: will try to get all possible items for not more
+    than ``max_time`` seconds; will return after ``max_empty_count`` calls of ``target_queue.get(False)`` that raised
+    ``Queue.Empty`` exception. Notice ``read_break`` argument, with default values this method will be able to read
+    ~4500 ``range(1,10000)`` objects for 5 seconds. So don't fill queue too fast.
+
+    :param target_queue: queue to read from
+    :param max_time: maximum time to spend in this method call
+    :param max_empty_count: maximum allowed ``Queue.Empty`` in a row
+    :param read_break: time to wait before next read cycle iteration
+    :return: list of resulting objects
     """
     results = []
+    _empty = 0
+    _start = time.time()
     with self.usage_lock:
       try:
-        while not self.mp_result_logs.empty():
+        while (not target_queue.empty() or target_queue.qsize() > 0) and time.time() - _start < max_time and _empty < max_empty_count:
           try:
-            results.append(self.mp_result_logs.get(False))
+            results.append(target_queue.get(False))
+            _empty = 0
+            time.sleep(read_break) # sleep a little to get more accurate empty and qsize results
           except Queue.Empty:
-            pass
+            _empty += 1
           except IOError:
             pass
           except UnicodeDecodeError:
@@ -151,11 +142,23 @@ class MultiProcessStatusCommandsExecutor(StatusCommandsExecutor):
         pass
     return results
 
+  def _log_message(self, level, message, exception=None):
+    """
+    Put log message to logging queue. Must be used only for logging from child process(in _worker_process_target).
+
+    :param level:
+    :param message:
+    :param exception:
+    :return:
+    """
+    result_message = "StatusCommandExecutor reporting at {0}: ".format(time.time()) + message
+    self.mp_result_logs.put((level, result_message, exception))
+
   def _process_logs(self):
     """
     Get all available at this moment logs and prints them to logger.
     """
-    for level, message, exception in self._get_log_messages():
+    for level, message, exception in self._drain_queue(self.mp_result_logs):
       if level == logging.ERROR:
         logger.debug(message, exc_info=exception)
       if level == logging.WARN:
@@ -256,16 +259,6 @@ class MultiProcessStatusCommandsExecutor(StatusCommandsExecutor):
     :return:
     """
     with self.usage_lock:
-      if not self.mp_task_queue.empty():
-        status_command_queue_size = 0
-        try:
-          while not self.mp_task_queue.empty():
-            self.mp_task_queue.get(False)
-            status_command_queue_size += 1
-        except Queue.Empty:
-          pass
-
-        logger.info("Number of status commands removed from queue : " + str(status_command_queue_size))
       for command in commands:
         logger.info("Adding " + command['commandType'] + " for component " + \
                     command['componentName'] + " of service " + \
@@ -276,43 +269,29 @@ class MultiProcessStatusCommandsExecutor(StatusCommandsExecutor):
 
   def process_results(self):
     """
-    Process all the results from the internal worker
+    Process all the results from the SCE worker process.
     """
     self._process_logs()
-    for result in self._get_results():
+    results = self._drain_queue(self.mp_result_queue)
+    logger.debug("Drained %s status commands results, ~%s remains in queue", len(results), self.mp_result_queue.qsize())
+    for result in results:
       try:
         self.actionQueue.process_status_command_result(result)
       except UnicodeDecodeError:
         pass
 
-  def _get_results(self):
-    """
-    Get all available results for status commands.
-
-    :return: list of results
-    """
-    results = []
-    with self.usage_lock:
-      try:
-        while not self.mp_result_queue.empty():
-          try:
-            results.append(self.mp_result_queue.get(False))
-          except Queue.Empty:
-            pass
-          except IOError:
-            pass
-          except UnicodeDecodeError:
-            pass
-      except IOError:
-        pass
-    return results
-
   @property
   def need_relaunch(self):
     """
     Indicates if process need to be relaunched due to timeout or it is dead or even was not created.
+
+    :return: tuple (bool, str|None) with flag to relaunch and reason of relaunch
     """
-    return self.timedOutEvent.is_set() or not self.worker_process or not self.worker_process.is_alive()
+    if not self.worker_process or not self.worker_process.is_alive():
+      return True, "WORKER_DEAD"
+    elif self.timedOutEvent.is_set():
+      return True, "COMMAND_TIMEOUT"
+    return False, None
 
   def relaunch(self, reason=None):
     """
@@ -321,13 +300,15 @@ class MultiProcessStatusCommandsExecutor(StatusCommandsExecutor):
     :param reason: reason of restart
     :return:
     """
-    if self.can_relaunch:
-      self.kill(reason)
-      self.worker_process = multiprocessing.Process(target=self._worker_process_target)
-      self.worker_process.start()
-      logger.info("Started process with pid {0}".format(self.worker_process.pid))
-    else:
-      logger.debug("Relaunch does not allowed, can not relaunch")
+    with self.kill_lock:
+      logger.info("Relaunching child process reason:" + str(reason))
+      if self.can_relaunch:
+        self.kill(reason)
+        self.worker_process = multiprocessing.Process(target=self._worker_process_target)
+        self.worker_process.start()
+        logger.info("Started process with pid {0}".format(self.worker_process.pid))
+      else:
+        logger.debug("Relaunch does not allowed, can not relaunch")
 
   def kill(self, reason=None, can_relaunch=True):
     """
@@ -339,43 +320,43 @@ class MultiProcessStatusCommandsExecutor(StatusCommandsExecutor):
     :param reason: reason of killing
     :return:
     """
-    logger.info("Killing child process reason:" + str(reason))
-    self.can_relaunch = can_relaunch
-
-    if not self.can_relaunch:
-      logger.info("Killing without possibility to relaunch...")
-
-    # try graceful stop, otherwise hard-kill
-    if self.worker_process and self.worker_process.is_alive():
-      self.mustDieEvent.set()
-      self.worker_process.join(timeout=3)
-      if self.worker_process.is_alive():
-        os.kill(self.worker_process.pid, signal.SIGKILL)
-        logger.info("Child process killed by -9")
+    with self.kill_lock:
+      self.can_relaunch = can_relaunch
+
+      if not self.can_relaunch:
+        logger.info("Killing without possibility to relaunch...")
+
+      # try graceful stop, otherwise hard-kill
+      if self.worker_process and self.worker_process.is_alive():
+        self.mustDieEvent.set()
+        self.worker_process.join(timeout=3)
+        if self.worker_process.is_alive():
+          os.kill(self.worker_process.pid, signal.SIGKILL)
+          logger.info("Child process killed by -9")
+        else:
+          # get log messages only if we died gracefully, otherwise we will have chance to block here forever, in most cases
+          # this call will do nothing, as all logs will be processed in ActionQueue loop
+          self._process_logs()
+          logger.info("Child process died gracefully")
       else:
-        # get log messages only if we died gracefully, otherwise we will have chance to block here forever, in most cases
-        # this call will do nothing, as all logs will be processed in ActionQueue loop
-        self._process_logs()
-        logger.info("Child process died gracefully")
-    else:
-      logger.info("Child process already dead")
-
-    # close queues and acquire usage lock
-    # closing both sides of pipes here, we need this hack in case of blocking on recv() call
-    self.mp_result_queue.close()
-    self.mp_result_queue._writer.close()
-    self.mp_result_logs.close()
-    self.mp_result_logs._writer.close()
-    self.mp_task_queue.close()
-    self.mp_task_queue._writer.close()
-
-    with self.usage_lock:
-      self.mp_result_queue.join_thread()
-      self.mp_result_queue = multiprocessing.Queue()
-      self.mp_task_queue.join_thread()
-      self.mp_task_queue = multiprocessing.Queue()
-      self.mp_result_logs.join_thread()
-      self.mp_result_logs = multiprocessing.Queue()
-      self.customServiceOrchestrator = self.actionQueue.customServiceOrchestrator
-      self.mustDieEvent.clear()
-      self.timedOutEvent.clear()
+        logger.info("Child process already dead")
+
+      # close queues and acquire usage lock
+      # closing both sides of pipes here, we need this hack in case of blocking on recv() call
+      self.mp_result_queue.close()
+      self.mp_result_queue._writer.close()
+      self.mp_result_logs.close()
+      self.mp_result_logs._writer.close()
+      self.mp_task_queue.close()
+      self.mp_task_queue._writer.close()
+
+      with self.usage_lock:
+        self.mp_result_queue.join_thread()
+        self.mp_result_queue = multiprocessing.Queue()
+        self.mp_task_queue.join_thread()
+        self.mp_task_queue = multiprocessing.Queue()
+        self.mp_result_logs.join_thread()
+        self.mp_result_logs = multiprocessing.Queue()
+        self.customServiceOrchestrator = self.actionQueue.customServiceOrchestrator
+        self.mustDieEvent.clear()
+        self.timedOutEvent.clear()

http://git-wip-us.apache.org/repos/asf/ambari/blob/ccf9edbc/ambari-agent/src/main/python/ambari_agent/main.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/main.py b/ambari-agent/src/main/python/ambari_agent/main.py
index 923c570..19c92b0 100644
--- a/ambari-agent/src/main/python/ambari_agent/main.py
+++ b/ambari-agent/src/main/python/ambari_agent/main.py
@@ -352,8 +352,9 @@ def run_threads(server_hostname, heartbeat_stop_callback):
   while controller.is_alive():
     time.sleep(0.1)
 
-    if controller.get_status_commands_executor().need_relaunch:
-      controller.get_status_commands_executor().relaunch("COMMAND_TIMEOUT_OR_KILLED")
+    need_relaunch, reason = controller.get_status_commands_executor().need_relaunch
+    if need_relaunch:
+      controller.get_status_commands_executor().relaunch(reason)
 
   controller.get_status_commands_executor().kill("AGENT_STOPPED", can_relaunch=False)
 


[34/41] ambari git commit: AMBARI-20672 - Cluster Merge At End Of Upgrade Creation Cascades Unnecessarily (part2) (jonathanhurley)

Posted by ao...@apache.org.
AMBARI-20672 - Cluster Merge At End Of Upgrade Creation Cascades Unnecessarily (part2) (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/259dc90f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/259dc90f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/259dc90f

Branch: refs/heads/branch-3.0-perf
Commit: 259dc90fc2851e9ebce02b3efc255eb69268324c
Parents: 64447e5
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Apr 4 17:14:33 2017 -0400
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../ambari/server/state/alerts/AlertStateChangedEventTest.java     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/259dc90f/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
index d4dbdc8..f507cab 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
@@ -572,7 +572,7 @@ public class AlertStateChangedEventTest extends EasyMockSupport {
     EasyMock.reset(clusters);
 
     EasyMock.expect(clusters.getClusterById(EasyMock.anyLong())).andReturn(cluster).atLeastOnce();
-    EasyMock.expect(cluster.getUpgradeInProgress()).andReturn(null).anyTimes();
+    EasyMock.expect(cluster.getUpgradeInProgress()).andReturn(EasyMock.createMock(UpgradeEntity.class)).anyTimes();
     EasyMock.expect(cluster.isUpgradeSuspended()).andReturn(true).anyTimes();
   }
 


[12/41] ambari git commit: AMBARI-20640. Upgrade server-side actions should be performed only one time per group (ncole)

Posted by ao...@apache.org.
AMBARI-20640. Upgrade server-side actions should be performed only one time per group (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d972592b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d972592b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d972592b

Branch: refs/heads/branch-3.0-perf
Commit: d972592bb863bf31c45c9bfa844958d7e83af668
Parents: ccf9edb
Author: Nate Cole <nc...@hortonworks.com>
Authored: Thu Mar 30 20:53:17 2017 -0400
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../state/stack/upgrade/ColocatedGrouping.java  |  80 +++++++-
 .../state/stack/upgrade/TaskWrapperBuilder.java |  10 +-
 .../AmbariManagementControllerTest.java         |   2 +-
 .../ambari/server/state/UpgradeHelperTest.java  | 188 ++++++++++++++++++-
 .../upgrades/upgrade_multi_server_tasks.xml     |  88 +++++++++
 5 files changed, 352 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d972592b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
index c939320..272264f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
@@ -18,6 +18,7 @@
 package org.apache.ambari.server.state.stack.upgrade;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -35,10 +36,14 @@ import org.apache.ambari.server.stack.HostsType;
 import org.apache.ambari.server.state.UpgradeContext;
 import org.apache.ambari.server.state.stack.UpgradePack.ProcessingComponent;
 import org.apache.ambari.server.state.stack.upgrade.StageWrapper.Type;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Predicate;
+import com.google.common.collect.Collections2;
 import com.google.gson.JsonArray;
 import com.google.gson.JsonObject;
 import com.google.gson.JsonPrimitive;
@@ -95,6 +100,7 @@ public class ColocatedGrouping extends Grouping {
 
         Map<String, List<TaskProxy>> targetMap = ((i++) < count) ? initialBatch : finalBatches;
         List<TaskProxy> targetList = targetMap.get(host);
+
         if (null == targetList) {
           targetList = new ArrayList<>();
           targetMap.put(host, targetList);
@@ -160,8 +166,26 @@ public class ColocatedGrouping extends Grouping {
      * {@inheritDoc}
      */
     @Override
-    public List<StageWrapper> build(UpgradeContext upgradeContext,
-        List<StageWrapper> stageWrappers) {
+    public List<StageWrapper> build(UpgradeContext upgradeContext, List<StageWrapper> stageWrappers) {
+
+      final List<Task> visitedServerSideTasks = new ArrayList<>();
+
+      // !!! predicate to ensure server-side tasks are executed once only per grouping
+      Predicate<Task> predicate = new Predicate<Task>() {
+        @Override
+        public boolean apply(Task input) {
+          if (visitedServerSideTasks.contains(input)) {
+            return false;
+          }
+
+          if (input.getType().isServerAction()) {
+            visitedServerSideTasks.add(input);
+          }
+
+          return true;
+        };
+      };
+
       List<StageWrapper> results = new ArrayList<>(stageWrappers);
 
       if (LOG.isDebugEnabled()) {
@@ -169,7 +193,7 @@ public class ColocatedGrouping extends Grouping {
         LOG.debug("RU final: {}", finalBatches);
       }
 
-      List<StageWrapper> befores = fromProxies(upgradeContext.getDirection(), initialBatch);
+      List<StageWrapper> befores = fromProxies(upgradeContext.getDirection(), initialBatch, predicate);
       results.addAll(befores);
 
       if (!befores.isEmpty()) {
@@ -189,13 +213,14 @@ public class ColocatedGrouping extends Grouping {
         results.add(wrapper);
       }
 
-      results.addAll(fromProxies(upgradeContext.getDirection(), finalBatches));
+      results.addAll(fromProxies(upgradeContext.getDirection(), finalBatches, predicate));
 
       return results;
     }
 
     private List<StageWrapper> fromProxies(Direction direction,
-        Map<String, List<TaskProxy>> wrappers) {
+        Map<String, List<TaskProxy>> wrappers, Predicate<Task> predicate) {
+
       List<StageWrapper> results = new ArrayList<>();
 
       Set<String> serviceChecks = new HashSet<>();
@@ -213,10 +238,27 @@ public class ColocatedGrouping extends Grouping {
 
           if (!t.restart) {
             if (null == wrapper) {
-              wrapper = new StageWrapper(t.type, t.message, t.getTasksArray());
+              TaskWrapper[] tasks = t.getTasksArray(predicate);
+
+              if (LOG.isDebugEnabled()) {
+                for (TaskWrapper tw : tasks) {
+                  LOG.debug("{}", tw);
+                }
+              }
+
+              if (ArrayUtils.isNotEmpty(tasks)) {
+                wrapper = new StageWrapper(t.type, t.message, tasks);
+              }
             }
           } else {
-            execwrappers.add(new StageWrapper(StageWrapper.Type.RESTART, t.message, t.getTasksArray()));
+            TaskWrapper[] tasks = t.getTasksArray(null);
+
+            if (LOG.isDebugEnabled()) {
+              for (TaskWrapper tw : tasks) {
+                LOG.debug("{}", tw);
+              }
+            }
+            execwrappers.add(new StageWrapper(StageWrapper.Type.RESTART, t.message, tasks));
           }
         }
 
@@ -345,8 +387,28 @@ public class ColocatedGrouping extends Grouping {
       return s;
     }
 
-    private TaskWrapper[] getTasksArray() {
-      return tasks.toArray(new TaskWrapper[0]);
+    /**
+     * Get the task wrappers for this proxy.  Server-side tasks cannot be executed more than
+     * one time per grouping.
+     * @param predicate the predicate to determine if a server-side task has already been added to a wrapper.
+     * @return the wrappers for a stage
+     */
+    private TaskWrapper[] getTasksArray(Predicate<Task> predicate) {
+      if (null == predicate) {
+        return tasks.toArray(new TaskWrapper[tasks.size()]);
+      }
+
+      List<TaskWrapper> interim = new ArrayList<>();
+
+      for (TaskWrapper wrapper : tasks) {
+        Collection<Task> filtered = Collections2.filter(wrapper.getTasks(), predicate);
+
+        if (CollectionUtils.isNotEmpty(filtered)) {
+          interim.add(wrapper);
+        }
+      }
+
+      return interim.toArray(new TaskWrapper[interim.size()]);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d972592b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
index bd2bf14..a75fe00 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
@@ -27,6 +27,7 @@ import java.util.Set;
 
 import org.apache.ambari.server.stack.HostsType;
 import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.collections.CollectionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -54,11 +55,10 @@ public class TaskWrapperBuilder {
 
     List<TaskWrapper> collection = new ArrayList<>();
     for (Task t : tasks) {
-      if (t.getType().equals(Task.Type.CONFIGURE) || t.getType().equals(Task.Type.MANUAL)) {
-        // only add the CONFIGURE/MANUAL task if there are actual hosts for the service/component
-        if (null != hostsType.hosts && !hostsType.hosts.isEmpty()) {
-          collection.add(new TaskWrapper(service, component, Collections.singleton(ambariServerHostname), params, t));
-        }
+
+      // only add the server-side task if there are actual hosts for the service/component
+      if (t.getType().isServerAction() && CollectionUtils.isNotEmpty(hostsType.hosts)) {
+        collection.add(new TaskWrapper(service, component, Collections.singleton(ambariServerHostname), params, t));
         continue;
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d972592b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 483880a..554e089 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -7310,7 +7310,7 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals(1, responsesWithParams.size());
     StackVersionResponse resp = responsesWithParams.iterator().next();
     assertNotNull(resp.getUpgradePacks());
-    assertEquals(12, resp.getUpgradePacks().size());
+    assertEquals(13, resp.getUpgradePacks().size());
     assertTrue(resp.getUpgradePacks().contains("upgrade_test"));
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d972592b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index 8e5ad0a..0dd7f58 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -601,7 +601,7 @@ public class UpgradeHelperTest {
     assertEquals(4, groups.get(0).items.size());
     assertEquals(8, groups.get(1).items.size());
     assertEquals(5, groups.get(2).items.size());
-    assertEquals(8, groups.get(3).items.size());
+    assertEquals(7, groups.get(3).items.size());
     assertEquals(8, groups.get(4).items.size());
   }
 
@@ -943,6 +943,7 @@ public class UpgradeHelperTest {
     Map<String, String> hiveConfigs = new HashMap<>();
     hiveConfigs.put("fooKey", "THIS-BETTER-CHANGE");
     hiveConfigs.put("ifFooKey", "ifFooValue");
+
     ConfigurationRequest configurationRequest = new ConfigurationRequest();
     configurationRequest.setClusterName(cluster.getClusterName());
     configurationRequest.setType("hive-site");
@@ -1870,6 +1871,191 @@ public class UpgradeHelperTest {
     assertTrue(groups.isEmpty());
   }
 
+  @Test
+  public void testMultipleServerTasks() throws Exception {
+
+    // !!! make a two node cluster with just ZK
+    Clusters clusters = injector.getInstance(Clusters.class);
+    ServiceFactory serviceFactory = injector.getInstance(ServiceFactory.class);
+
+    String clusterName = "c1";
+
+    StackId stackId = new StackId("HDP-2.1.1");
+    StackId stackId2 = new StackId("HDP-2.2.0");
+
+    clusters.addCluster(clusterName, stackId);
+    Cluster c = clusters.getCluster(clusterName);
+
+    helper.getOrCreateRepositoryVersion(stackId,
+        c.getDesiredStackVersion().getStackVersion());
+
+    helper.getOrCreateRepositoryVersion(stackId2,"2.2.0");
+
+    helper.getOrCreateRepositoryVersion(stackId2, UPGRADE_VERSION);
+
+    c.createClusterVersion(stackId,
+        c.getDesiredStackVersion().getStackVersion(), "admin",
+        RepositoryVersionState.INSTALLING);
+
+    for (int i = 0; i < 2; i++) {
+      String hostName = "h" + (i+1);
+      clusters.addHost(hostName);
+      Host host = clusters.getHost(hostName);
+
+      Map<String, String> hostAttributes = new HashMap<>();
+      hostAttributes.put("os_family", "redhat");
+      hostAttributes.put("os_release_version", "6");
+      host.setHostAttributes(hostAttributes);
+
+      clusters.mapHostToCluster(hostName, clusterName);
+    }
+
+    // !!! add services
+    c.addService(serviceFactory.createNew(c, "ZOOKEEPER"));
+
+    Service s = c.getService("ZOOKEEPER");
+    ServiceComponent sc = s.addServiceComponent("ZOOKEEPER_SERVER");
+    sc.addServiceComponentHost("h1");
+    sc.addServiceComponentHost("h2");
+
+    sc = s.addServiceComponent("ZOOKEEPER_CLIENT");
+    sc.addServiceComponentHost("h1");
+    sc.addServiceComponentHost("h2");
+
+    EasyMock.reset(m_masterHostResolver);
+
+    expect(m_masterHostResolver.getCluster()).andReturn(c).anyTimes();
+
+    HostsType type = new HostsType();
+    type.hosts.addAll(Arrays.asList("h1", "h2"));
+    expect(m_masterHostResolver.getMasterAndHosts("ZOOKEEPER", "ZOOKEEPER_SERVER")).andReturn(type).anyTimes();
+
+    type = new HostsType();
+    type.hosts.addAll(Arrays.asList("h1", "h2"));
+    expect(m_masterHostResolver.getMasterAndHosts("ZOOKEEPER", "ZOOKEEPER_CLIENT")).andReturn(type).anyTimes();
+
+
+    replay(m_masterHostResolver);
+
+    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
+
+    ServiceInfo si = ambariMetaInfo.getService("HDP", "2.1.1", "ZOOKEEPER");
+    si.setDisplayName("Zk");
+    ComponentInfo ci = si.getComponentByName("ZOOKEEPER_SERVER");
+    ci.setDisplayName("ZooKeeper1 Server2");
+
+    UpgradePack upgrade = upgrades.get("upgrade_multi_server_tasks");
+    assertNotNull(upgrade);
+
+    UpgradeContext context = m_upgradeContextFactory.create(c, UpgradeType.NON_ROLLING,
+        Direction.UPGRADE, "2.2.0", new HashMap<String, Object>());
+    context.setResolver(m_masterHostResolver);
+
+    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
+
+    assertEquals(2, groups.size());
+
+
+    // zk server as a colocated grouping first.  XML says to run a manual, 2 configs, and an execute
+    UpgradeGroupHolder group1 = groups.get(0);
+    assertEquals(7, group1.items.size());
+
+    // Stage 1.  manual, 2 configs, execute
+    assertEquals(4, group1.items.get(0).getTasks().size());
+    TaskWrapper taskWrapper = group1.items.get(0).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.MANUAL, taskWrapper.getTasks().get(0).getType());
+
+    taskWrapper = group1.items.get(0).getTasks().get(1);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.CONFIGURE, taskWrapper.getTasks().get(0).getType());
+
+    taskWrapper = group1.items.get(0).getTasks().get(2);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.CONFIGURE, taskWrapper.getTasks().get(0).getType());
+
+    taskWrapper = group1.items.get(0).getTasks().get(3);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.EXECUTE, taskWrapper.getTasks().get(0).getType());
+
+    // Stage 2. restart for h1
+    assertEquals(1, group1.items.get(1).getTasks().size());
+    taskWrapper = group1.items.get(1).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.RESTART, taskWrapper.getTasks().get(0).getType());
+    assertTrue(taskWrapper.getHosts().contains("h1"));
+
+    // Stage 3. service check
+    assertEquals(1, group1.items.get(2).getTasks().size());
+    taskWrapper = group1.items.get(2).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.SERVICE_CHECK, taskWrapper.getTasks().get(0).getType());
+
+    // stage 4. manual step for validation
+    assertEquals(1, group1.items.get(3).getTasks().size());
+    taskWrapper = group1.items.get(3).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.MANUAL, taskWrapper.getTasks().get(0).getType());
+
+    // Stage 5. repeat execute as it's not a server-side task.  no configure or manual tasks
+    assertEquals(1, group1.items.get(4).getTasks().size());
+    taskWrapper = group1.items.get(4).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.EXECUTE, taskWrapper.getTasks().get(0).getType());
+
+    // Stage 6. restart for h2.
+    assertEquals(1, group1.items.get(5).getTasks().size());
+    taskWrapper = group1.items.get(5).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.RESTART, taskWrapper.getTasks().get(0).getType());
+    assertTrue(taskWrapper.getHosts().contains("h2"));
+
+    // Stage 7. service check
+    assertEquals(1, group1.items.get(6).getTasks().size());
+    taskWrapper = group1.items.get(6).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.SERVICE_CHECK, taskWrapper.getTasks().get(0).getType());
+
+
+    // zk client
+    UpgradeGroupHolder group2 = groups.get(1);
+    assertEquals(5, group2.items.size());
+
+    // Stage 1. Configure
+    assertEquals(1, group2.items.get(0).getTasks().size());
+    taskWrapper = group2.items.get(0).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.CONFIGURE, taskWrapper.getTasks().get(0).getType());
+
+    // Stage 2. Custom class
+    assertEquals(1, group2.items.get(1).getTasks().size());
+    taskWrapper = group2.items.get(1).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.SERVER_ACTION, taskWrapper.getTasks().get(0).getType());
+
+    // Stage 3. Restart client on h1
+    assertEquals(1, group2.items.get(2).getTasks().size());
+    taskWrapper = group2.items.get(2).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.RESTART, taskWrapper.getTasks().get(0).getType());
+
+    // Stage 4. Restart client on h2 (no configure or custom class)
+    assertEquals(1, group2.items.get(3).getTasks().size());
+    taskWrapper = group2.items.get(3).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.RESTART, taskWrapper.getTasks().get(0).getType());
+
+    // Stage 5. service check
+    assertEquals(1, group2.items.get(4).getTasks().size());
+    taskWrapper = group2.items.get(4).getTasks().get(0);
+    assertEquals(1, taskWrapper.getTasks().size());
+    assertEquals(Task.Type.SERVICE_CHECK, taskWrapper.getTasks().get(0).getType());
+
+  }
+
+
+
+
   /**
    * Tests {@link UpgradeType#HOST_ORDERED}, specifically that the orchestration
    * can properly expand the single {@link HostOrderGrouping} and create the

http://git-wip-us.apache.org/repos/asf/ambari/blob/d972592b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_multi_server_tasks.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_multi_server_tasks.xml b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_multi_server_tasks.xml
new file mode 100644
index 0000000..de99d59
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_multi_server_tasks.xml
@@ -0,0 +1,88 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<upgrade xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="upgrade-pack.xsd">
+  <target>2.2.*.*</target>
+  <target-stack>HDP-2.2.0</target-stack>
+  <type>ROLLING</type>
+  <prerequisite-checks />
+  
+  <order>
+    <group xsi:type="colocated" name="ZOOKEEPER" title="Zookeeper">
+      <skippable>true</skippable>
+      <allow-retry>false</allow-retry>
+      <service name="ZOOKEEPER">
+        <component>ZOOKEEPER_SERVER</component>
+      </service>
+      
+      <batch>
+        <percent>20</percent>
+        <message>Please run additional tests on {{components}}</message>
+      </batch>
+      
+    </group>
+    
+    <group name="CLIENTS" title="Zookeeper Clients">
+      <skippable>true</skippable>
+      <allow-retry>false</allow-retry>
+      <service name="ZOOKEEPER">
+        <component>ZOOKEEPER_CLIENT</component>
+      </service>
+    </group>
+    
+  </order>
+  
+  <processing>
+    <service name="ZOOKEEPER">
+      <component name="ZOOKEEPER_SERVER">
+        <pre-upgrade>
+          <task xsi:type="manual">
+            <message>This is a manual task with a placeholder of {{foo/bar}}</message>
+          </task>
+          
+          <task xsi:type="configure" id="hdp_2_1_1_zookeeper_new_config_type" />
+          <task xsi:type="configure" id="hdp_2_1_1_zookeeper_new_config_type" />
+          
+         
+          <task xsi:type="execute">
+            <script>foo</script>
+            <function>list</function>
+          </task>
+         
+        </pre-upgrade>
+        <pre-downgrade copy-upgrade="true" />
+        <upgrade>
+          <task xsi:type="restart-task" />
+        </upgrade>
+      </component>
+      
+      <component name="ZOOKEEPER_CLIENT">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_1_1_zookeeper_new_config_type" />
+          <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath"/>
+        </pre-upgrade>
+        
+        <pre-downgrade />
+        
+        <upgrade>
+          <task xsi:type="restart-task" />
+        </upgrade>
+      </component>
+      
+    </service>
+  </processing>
+</upgrade>


[03/41] ambari git commit: AMBARI-20660. HiveView2.0 scrolling in query tab does not work properly for a longer query (pallavkul)

Posted by ao...@apache.org.
AMBARI-20660. HiveView2.0 scrolling in query tab does not work properly for a longer query (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c3361d90
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c3361d90
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c3361d90

Branch: refs/heads/branch-3.0-perf
Commit: c3361d90718cb3d6879f830132a67e3f0afa010c
Parents: f6fbe4b
Author: pallavkul <pa...@gmail.com>
Authored: Wed Apr 5 11:33:30 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 contrib/views/hive20/src/main/resources/ui/app/styles/app.scss | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c3361d90/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index c06e65e..f4b63c5 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -973,3 +973,7 @@ rect.operator__box {
   bottom:10px;
   right:-15px;
 }
+
+.CodeMirror-scroll {
+  padding-bottom: 20px;
+}


[41/41] ambari git commit: AMBARI-20684. Implement a websocket adapter for stomp.py (aonishuk)

Posted by ao...@apache.org.
AMBARI-20684. Implement a websocket adapter for stomp.py (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8de3961b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8de3961b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8de3961b

Branch: refs/heads/branch-3.0-perf
Commit: 8de3961b62dcf23826e16134b9dd5c5c461ab50c
Parents: fb4637b
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Apr 6 12:33:00 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:33:00 2017 +0300

----------------------------------------------------------------------
 LICENSE.txt                                     |  28 +
 NOTICE.txt                                      |   5 +
 ambari-agent/conf/unix/install-helper.sh        |  11 +
 ambari-agent/pom.xml                            |   1 +
 .../main/python/ambari_agent/client_example.py  |  69 +++
 ambari-agent/src/packages/tarball/all.xml       |   5 +
 .../python/ambari_stomp/adapter/websocket.py    | 106 ++++
 .../src/main/python/ambari_ws4py/__init__.py    |  67 +++
 .../main/python/ambari_ws4py/client/__init__.py | 339 ++++++++++++
 .../python/ambari_ws4py/client/geventclient.py  |  92 ++++
 .../ambari_ws4py/client/threadedclient.py       |  98 ++++
 .../python/ambari_ws4py/client/tornadoclient.py | 155 ++++++
 .../src/main/python/ambari_ws4py/compat.py      |  46 ++
 .../src/main/python/ambari_ws4py/exc.py         |  27 +
 .../src/main/python/ambari_ws4py/framing.py     | 273 ++++++++++
 .../src/main/python/ambari_ws4py/manager.py     | 368 +++++++++++++
 .../src/main/python/ambari_ws4py/messaging.py   | 169 ++++++
 .../src/main/python/ambari_ws4py/streaming.py   | 319 +++++++++++
 .../main/python/ambari_ws4py/utf8validator.py   | 117 ++++
 .../src/main/python/ambari_ws4py/websocket.py   | 535 +++++++++++++++++++
 ambari-project/pom.xml                          | 100 +++-
 ambari-server/pom.xml                           |  82 ++-
 pom.xml                                         |   2 +
 23 files changed, 3009 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/LICENSE.txt
----------------------------------------------------------------------
diff --git a/LICENSE.txt b/LICENSE.txt
index f05016f..f2dc400 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -344,6 +344,34 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+For ws4py websocket python library.
+
+Copyright (c) 2011-2016, Sylvain Hellegouarch
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+   this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+ * Neither the name of ws4py nor the names of its contributors may be used
+   to endorse or promote products derived from this software without
+   specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
 
 For jQuery 1.9.1 (ambari-web/vendor/scripts.jquery-1.9.1.js):
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/NOTICE.txt
----------------------------------------------------------------------
diff --git a/NOTICE.txt b/NOTICE.txt
index 50f982c..7429765 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -28,4 +28,9 @@ Some rights reserved.
 This product includes Simplejson, library fast encoding and decoding of json. (https://github.com/simplejson/simplejson - MIT license)
 
 Copyright (c) 2006 Bob Ippolito.
+All rights reserved.
+
+Ws4py, python library for websocket connectivity
+
+Copyright (c) 2011-2016, Sylvain Hellegouarch
 All rights reserved.
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-agent/conf/unix/install-helper.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/install-helper.sh b/ambari-agent/conf/unix/install-helper.sh
index 35aec15..a790160 100644
--- a/ambari-agent/conf/unix/install-helper.sh
+++ b/ambari-agent/conf/unix/install-helper.sh
@@ -23,6 +23,7 @@ RESOURCE_MANAGEMENT_DIR="/usr/lib/python2.6/site-packages/resource_management"
 JINJA_DIR="/usr/lib/python2.6/site-packages/ambari_jinja2"
 SIMPLEJSON_DIR="/usr/lib/python2.6/site-packages/ambari_simplejson"
 STOMP_DIR="/usr/lib/python2.6/site-packages/ambari_stomp"
+WS4PY_DIR="/usr/lib/python2.6/site-packages/ambari_ws4py"
 OLD_COMMON_DIR="/usr/lib/python2.6/site-packages/common_functions"
 INSTALL_HELPER_SERVER="/var/lib/ambari-server/install-helper.sh"
 COMMON_DIR_AGENT="/usr/lib/ambari-agent/lib/ambari_commons"
@@ -30,6 +31,7 @@ RESOURCE_MANAGEMENT_DIR_AGENT="/usr/lib/ambari-agent/lib/resource_management"
 JINJA_AGENT_DIR="/usr/lib/ambari-agent/lib/ambari_jinja2"
 SIMPLEJSON_AGENT_DIR="/usr/lib/ambari-agent/lib/ambari_simplejson"
 STOMP_AGENT_DIR="/usr/lib/ambari-agent/lib/ambari_stomp"
+WS4PY_AGENT_DIR="/usr/lib/ambari-agent/lib/ambari_ws4py"
 AMBARI_AGENT="/usr/lib/python2.6/site-packages/ambari_agent"
 PYTHON_WRAPER_TARGET="/usr/bin/ambari-python-wrap"
 AMBARI_AGENT_VAR="/var/lib/ambari-agent"
@@ -70,6 +72,11 @@ do_install(){
   if [ ! -d "$STOMP_DIR" ]; then
     ln -s "$STOMP_AGENT_DIR" "$STOMP_DIR"
   fi
+
+  # setting ws4py shared resource
+  if [ ! -d "$WS4PY_DIR" ]; then
+    ln -s "$WS4PY_AGENT_DIR" "$WS4PY_DIR"
+  fi
   
   # on nano Ubuntu, when umask=027 those folders are created without 'x' bit for 'others'.
   # which causes failures when hadoop users try to access tmp_dir
@@ -159,6 +166,10 @@ do_remove(){
     rm -f $STOMP_DIR
   fi
 
+  if [ -d "$WS4PY_DIR" ]; then
+    rm -f $WS4PY_DIR
+  fi
+
   if [ -d "$OLD_COMMON_DIR" ]; then
     rm -f $OLD_COMMON_DIR
   fi

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index 4807a35..c232947 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -42,6 +42,7 @@
     <jinja.install.dir>/usr/lib/ambari-agent/lib/ambari_jinja2</jinja.install.dir>
     <simplejson.install.dir>/usr/lib/ambari-agent/lib/ambari_simplejson</simplejson.install.dir>
     <stomp.install.dir>/usr/lib/ambari-agent/lib/ambari_stomp</stomp.install.dir>
+    <ws4py.install.dir>/usr/lib/ambari-agent/lib/ambari_ws4py</ws4py.install.dir>
     <lib.dir>/usr/lib/ambari-agent/lib</lib.dir>
     <deb.architecture>amd64</deb.architecture>
     <ambari.server.module>../ambari-server</ambari.server.module>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-agent/src/main/python/ambari_agent/client_example.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/client_example.py b/ambari-agent/src/main/python/ambari_agent/client_example.py
new file mode 100644
index 0000000..96e76be
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/client_example.py
@@ -0,0 +1,69 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+# TODO: remove this
+
+import time
+import ambari_stomp
+from ambari_stomp.adapter import websocket
+import base64
+
+correlationId = 0
+
+def get_headers():
+  global correlationId
+  correlationId += 1
+  headers = {
+    "content-type": "text/plain",
+    "correlationId": correlationId
+  }
+  return headers
+
+class MyListener(ambari_stomp.ConnectionListener):
+  def on_message(self, headers, message):
+    print('MyListener:\nreceived a message "{0}"\n'.format(message))
+    global read_messages
+    print headers
+    print message
+    read_messages.append({'id': headers['message-id'], 'subscription':headers['subscription']})
+
+
+class MyStatsListener(ambari_stomp.StatsListener):
+  def on_disconnected(self):
+    super(MyStatsListener, self).on_disconnected()
+    print('MyStatsListener:\n{0}\n'.format(self))
+
+read_messages = []
+
+conn = websocket.WsConnection('ws://gc6401:8080/api/stomp/v1')
+conn.transport.ws.extra_headers = [("Authorization", "Basic " + base64.b64encode('admin:admin'))]
+conn.set_listener('my_listener', MyListener())
+conn.set_listener('stats_listener', MyStatsListener())
+conn.start()
+
+conn.connect(wait=True, headers=get_headers())
+
+conn.subscribe(destination='/user/', id='sub-0', ack='client-individual')
+
+#conn.send(body="", destination='/test/time', headers=get_headers())
+conn.send(body="some message", destination='/test/echo', headers=get_headers())
+time.sleep(1)
+for message in read_messages:
+  conn.ack(message['id'], message['subscription'])
+
+conn.disconnect()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-agent/src/packages/tarball/all.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/src/packages/tarball/all.xml b/ambari-agent/src/packages/tarball/all.xml
index a22f0bb..c830dd1 100644
--- a/ambari-agent/src/packages/tarball/all.xml
+++ b/ambari-agent/src/packages/tarball/all.xml
@@ -77,6 +77,11 @@
     </fileSet>
     <fileSet>
       <directoryMode>755</directoryMode>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_ws4py</directory>
+      <outputDirectory>${ws4py.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
       <directory>src/examples</directory>
       <outputDirectory>${lib.dir}/examples</outputDirectory>
     </fileSet>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_stomp/adapter/websocket.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_stomp/adapter/websocket.py b/ambari-common/src/main/python/ambari_stomp/adapter/websocket.py
new file mode 100644
index 0000000..8416a27
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_stomp/adapter/websocket.py
@@ -0,0 +1,106 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import copy
+import logging
+
+from Queue import Queue
+
+from ambari_stomp.connect import BaseConnection
+from ambari_stomp.protocol import Protocol12
+from ambari_stomp.transport import Transport, DEFAULT_SSL_VERSION
+
+from ambari_ws4py.client.threadedclient import WebSocketClient
+
+logger = logging.getLogger(__name__)
+
+class QueuedWebSocketClient(WebSocketClient):
+  def __init__(self, *args, **kwargs):
+    WebSocketClient.__init__(self, *args, **kwargs)
+    self.messages = Queue()
+
+  def received_message(self, message):
+    """
+    Override the base class to store the incoming message
+    in the `messages` queue.
+    """
+    self.messages.put(copy.deepcopy(message))
+
+  def receive(self):
+    """
+    Returns messages that were stored into the
+    `messages` queue and returns `None` when the
+    websocket is terminated or closed.
+    """
+    # If the websocket was terminated and there are no messages
+    # left in the queue, return None immediately otherwise the client
+    # will block forever
+    if self.terminated and self.messages.empty():
+      return None
+    message = self.messages.get()
+    if message is StopIteration:
+      return None
+    return message
+
+  def closed(self, code, reason=None):
+    self.messages.put(StopIteration)
+
+class WsTransport(Transport):
+  def __init__(self, url):
+    Transport.__init__(self, (0, 0), False, False, 0.0, 0.0, 0.0, 0.0, 0, False, None, None, None, None, False,
+    DEFAULT_SSL_VERSION, None, None, None)
+    self.current_host_and_port = (0, 0) # mocking
+    self.ws = QueuedWebSocketClient(url, protocols=['http-only', 'chat'])
+    self.ws.daemon = False
+
+  def is_connected(self):
+    return self.connected
+
+  def attempt_connection(self):
+    self.ws.connect()
+
+  def send(self, encoded_frame):
+    logger.debug("Outgoing STOMP message:\n>>> " + encoded_frame)
+    self.ws.send(encoded_frame)
+
+  def receive(self):
+    try:
+      msg = str(self.ws.receive())
+      logger.debug("Incoming STOMP message:\n<<< " + msg)
+      return msg
+    except:
+      # exceptions from this method are hidden by the framework so implementing logging by ourselves
+      logger.exception("Exception while handling incoming STOMP message:")
+    return None
+
+  def stop(self):
+    self.running = False
+    self.ws.close_connection()
+    self.disconnect_socket()
+    Transport.stop(self)
+
+class WsConnection(BaseConnection, Protocol12):
+  def __init__(self, url, wait_on_receipt=False):
+    self.transport = WsTransport(url)
+    self.transport.set_listener('ws-listener', self)
+    self.transactions = {}
+    Protocol12.__init__(self, self.transport, (0, 0))
+
+  def disconnect(self, receipt=None, headers=None, **keyword_headers):
+    Protocol12.disconnect(self, receipt, headers, **keyword_headers)
+    self.transport.stop()

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/__init__.py b/ambari-common/src/main/python/ambari_ws4py/__init__.py
new file mode 100644
index 0000000..81d30b5
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/__init__.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of ambari_ws4py nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import logging
+import logging.handlers as handlers
+
+__author__ = "Sylvain Hellegouarch"
+__version__ = "0.4.2"
+__all__ = ['WS_KEY', 'WS_VERSION', 'configure_logger', 'format_addresses']
+
+WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
+WS_VERSION = (8, 13)
+
+def configure_logger(stdout=True, filepath=None, level=logging.INFO):
+    logger = logging.getLogger('ambari_ws4py')
+    logger.setLevel(level)
+    logfmt = logging.Formatter("[%(asctime)s] %(levelname)s %(message)s")
+
+    if filepath:
+        h = handlers.RotatingFileHandler(filepath, maxBytes=10485760, backupCount=3)
+        h.setLevel(level)
+        h.setFormatter(logfmt)
+        logger.addHandler(h)
+
+    if stdout:
+        import sys
+        h = logging.StreamHandler(sys.stdout)
+        h.setLevel(level)
+        h.setFormatter(logfmt)
+        logger.addHandler(h)
+
+    return logger
+
+def format_addresses(ws):
+    me = ws.local_address
+    peer = ws.peer_address
+    if isinstance(me, tuple) and isinstance(peer, tuple):
+        me_ip, me_port = ws.local_address
+        peer_ip, peer_port = ws.peer_address
+        return "[Local => %s:%d | Remote => %s:%d]" % (me_ip, me_port, peer_ip, peer_port)
+
+    return "[Bound to '%s']" % me

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/client/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/client/__init__.py b/ambari-common/src/main/python/ambari_ws4py/client/__init__.py
new file mode 100644
index 0000000..89598ab
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/client/__init__.py
@@ -0,0 +1,339 @@
+# -*- coding: utf-8 -*-
+from base64 import b64encode
+from hashlib import sha1
+import os
+import socket
+import ssl
+
+from ambari_ws4py import WS_KEY, WS_VERSION
+from ambari_ws4py.exc import HandshakeError
+from ambari_ws4py.websocket import WebSocket
+from ambari_ws4py.compat import urlsplit
+
+__all__ = ['WebSocketBaseClient']
+
+class WebSocketBaseClient(WebSocket):
+    def __init__(self, url, protocols=None, extensions=None,
+                 heartbeat_freq=None, ssl_options=None, headers=None):
+        """
+        A websocket client that implements :rfc:`6455` and provides a simple
+        interface to communicate with a websocket server.
+
+        This class works on its own but will block if not run in
+        its own thread.
+
+        When an instance of this class is created, a :py:mod:`socket`
+        is created. If the connection is a TCP socket,
+        Nagle's algorithm is disabled.
+
+        The address of the server will be extracted from the given
+        websocket url.
+
+        The websocket key is randomly generated, reset the
+        `key` attribute if you want to provide yours.
+
+        For instance to create a TCP client:
+
+        .. code-block:: python
+
+           >>> from websocket.client import WebSocketBaseClient
+           >>> ws = WebSocketBaseClient('ws://localhost/ws')
+
+
+        Here is an example for a TCP client over SSL:
+
+        .. code-block:: python
+
+           >>> from websocket.client import WebSocketBaseClient
+           >>> ws = WebSocketBaseClient('wss://localhost/ws')
+
+
+        Finally an example of a Unix-domain connection:
+
+        .. code-block:: python
+
+           >>> from websocket.client import WebSocketBaseClient
+           >>> ws = WebSocketBaseClient('ws+unix:///tmp/my.sock')
+
+        Note that in this case, the initial Upgrade request
+        will be sent to ``/``. You may need to change this
+        by setting the resource explicitly before connecting:
+
+        .. code-block:: python
+
+           >>> from websocket.client import WebSocketBaseClient
+           >>> ws = WebSocketBaseClient('ws+unix:///tmp/my.sock')
+           >>> ws.resource = '/ws'
+           >>> ws.connect()
+
+        You may provide extra headers by passing a list of tuples
+        which must be unicode objects.
+
+        """
+        self.url = url
+        self.host = None
+        self.scheme = None
+        self.port = None
+        self.unix_socket_path = None
+        self.resource = None
+        self.ssl_options = ssl_options or {}
+        self.extra_headers = headers or []
+
+        if self.scheme == "wss":
+            # Prevent check_hostname requires server_hostname (ref #187)
+            if "cert_reqs" not in self.ssl_options:
+                self.ssl_options["cert_reqs"] = ssl.CERT_NONE
+
+        self._parse_url()
+
+        if self.unix_socket_path:
+            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
+        else:
+            # Let's handle IPv4 and IPv6 addresses
+            # Simplified from CherryPy's code
+            try:
+                family, socktype, proto, canonname, sa = socket.getaddrinfo(self.host, self.port,
+                                                                            socket.AF_UNSPEC,
+                                                                            socket.SOCK_STREAM,
+                                                                            0, socket.AI_PASSIVE)[0]
+            except socket.gaierror:
+                family = socket.AF_INET
+                if self.host.startswith('::'):
+                    family = socket.AF_INET6
+
+                socktype = socket.SOCK_STREAM
+                proto = 0
+                canonname = ""
+                sa = (self.host, self.port, 0, 0)
+
+            sock = socket.socket(family, socktype, proto)
+            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            if hasattr(socket, 'AF_INET6') and family == socket.AF_INET6 and \
+              self.host.startswith('::'):
+                try:
+                    sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
+                except (AttributeError, socket.error):
+                    pass
+
+        WebSocket.__init__(self, sock, protocols=protocols,
+                           extensions=extensions,
+                           heartbeat_freq=heartbeat_freq)
+
+        self.stream.always_mask = True
+        self.stream.expect_masking = False
+        self.key = b64encode(os.urandom(16))
+
+    # Adapted from: https://github.com/liris/websocket-client/blob/master/websocket.py#L105
+    def _parse_url(self):
+        """
+        Parses a URL which must have one of the following forms:
+
+        - ws://host[:port][path]
+        - wss://host[:port][path]
+        - ws+unix:///path/to/my.socket
+
+        In the first two cases, the ``host`` and ``port``
+        attributes will be set to the parsed values. If no port
+        is explicitly provided, it will be either 80 or 443
+        based on the scheme. Also, the ``resource`` attribute is
+        set to the path segment of the URL (alongside any querystring).
+
+        In addition, if the scheme is ``ws+unix``, the
+        ``unix_socket_path`` attribute is set to the path to
+        the Unix socket while the ``resource`` attribute is
+        set to ``/``.
+        """
+        # Python 2.6.1 and below don't parse ws or wss urls properly. netloc is empty.
+        # See: https://github.com/Lawouach/WebSocket-for-Python/issues/59
+        scheme, url = self.url.split(":", 1)
+
+        parsed = urlsplit(url, scheme="http")
+        if parsed.hostname:
+            self.host = parsed.hostname
+        elif '+unix' in scheme:
+            self.host = 'localhost'
+        else:
+            raise ValueError("Invalid hostname from: %s", self.url)
+
+        if parsed.port:
+            self.port = parsed.port
+
+        if scheme == "ws":
+            if not self.port:
+                self.port = 80
+        elif scheme == "wss":
+            if not self.port:
+                self.port = 443
+        elif scheme in ('ws+unix', 'wss+unix'):
+            pass
+        else:
+            raise ValueError("Invalid scheme: %s" % scheme)
+
+        if parsed.path:
+            resource = parsed.path
+        else:
+            resource = "/"
+
+        if '+unix' in scheme:
+            self.unix_socket_path = resource
+            resource = '/'
+
+        if parsed.query:
+            resource += "?" + parsed.query
+
+        self.scheme = scheme
+        self.resource = resource
+
+    @property
+    def bind_addr(self):
+        """
+        Returns the Unix socket path or a tuple
+        ``(host, port)`` depending on the initial
+        URL's scheme.
+        """
+        return self.unix_socket_path or (self.host, self.port)
+
+    def close(self, code=1000, reason=''):
+        """
+        Initiate the closing handshake with the server.
+        """
+        if not self.client_terminated:
+            self.client_terminated = True
+            self._write(self.stream.close(code=code, reason=reason).single(mask=True))
+
+    def connect(self):
+        """
+        Connects this websocket and starts the upgrade handshake
+        with the remote endpoint.
+        """
+        if self.scheme == "wss":
+            # default port is now 443; upgrade self.sender to send ssl
+            self.sock = ssl.wrap_socket(self.sock, **self.ssl_options)
+            self._is_secure = True
+            
+        self.sock.connect(self.bind_addr)
+
+        self._write(self.handshake_request)
+
+        response = b''
+        doubleCLRF = b'\r\n\r\n'
+        while True:
+            bytes = self.sock.recv(128)
+            if not bytes:
+                break
+            response += bytes
+            if doubleCLRF in response:
+                break
+
+        if not response:
+            self.close_connection()
+            raise HandshakeError("Invalid response")
+
+        headers, _, body = response.partition(doubleCLRF)
+        response_line, _, headers = headers.partition(b'\r\n')
+
+        try:
+            self.process_response_line(response_line)
+            self.protocols, self.extensions = self.process_handshake_header(headers)
+        except HandshakeError:
+            self.close_connection()
+            raise
+
+        self.handshake_ok()
+        if body:
+            self.process(body)
+
+    @property
+    def handshake_headers(self):
+        """
+        List of headers appropriate for the upgrade
+        handshake.
+        """
+        headers = [
+            ('Host', '%s:%s' % (self.host, self.port)),
+            ('Connection', 'Upgrade'),
+            ('Upgrade', 'websocket'),
+            ('Sec-WebSocket-Key', self.key.decode('utf-8')),
+            ('Sec-WebSocket-Version', str(max(WS_VERSION)))
+            ]
+        
+        if self.protocols:
+            headers.append(('Sec-WebSocket-Protocol', ','.join(self.protocols)))
+
+        if self.extra_headers:
+            headers.extend(self.extra_headers)
+
+        if not any(x for x in headers if x[0].lower() == 'origin'):
+
+            scheme, url = self.url.split(":", 1)
+            parsed = urlsplit(url, scheme="http")
+            if parsed.hostname:
+                self.host = parsed.hostname
+            else:
+                self.host = 'localhost'
+            origin = scheme + '://' + self.host
+            if parsed.port:
+                origin = origin + ':' + str(parsed.port)
+            headers.append(('Origin', origin))
+
+        return headers
+
+    @property
+    def handshake_request(self):
+        """
+        Prepare the request to be sent for the upgrade handshake.
+        """
+        headers = self.handshake_headers
+        request = [("GET %s HTTP/1.1" % self.resource).encode('utf-8')]
+        for header, value in headers:
+            request.append(("%s: %s" % (header, value)).encode('utf-8'))
+        request.append(b'\r\n')
+
+        return b'\r\n'.join(request)
+
+    def process_response_line(self, response_line):
+        """
+        Ensure that we received a HTTP `101` status code in
+        response to our request and if not raises :exc:`HandshakeError`.
+        """
+        protocol, code, status = response_line.split(b' ', 2)
+        if code != b'101':
+            raise HandshakeError("Invalid response status: %s %s" % (code, status))
+
+    def process_handshake_header(self, headers):
+        """
+        Read the upgrade handshake's response headers and
+        validate them against :rfc:`6455`.
+        """
+        protocols = []
+        extensions = []
+
+        headers = headers.strip()
+
+        for header_line in headers.split(b'\r\n'):
+            header, value = header_line.split(b':', 1)
+            header = header.strip().lower()
+            value = value.strip().lower()
+
+            if header == b'upgrade' and value != b'websocket':
+                raise HandshakeError("Invalid Upgrade header: %s" % value)
+
+            elif header == b'connection' and value != b'upgrade':
+                raise HandshakeError("Invalid Connection header: %s" % value)
+
+            elif header == b'sec-websocket-accept':
+                match = b64encode(sha1(self.key + WS_KEY).digest())
+                if value != match.lower():
+                    raise HandshakeError("Invalid challenge response: %s" % value)
+
+            elif header == b'sec-websocket-protocol':
+                protocols = ','.join(value)
+
+            elif header == b'sec-websocket-extensions':
+                extensions = ','.join(value)
+
+        return protocols, extensions
+
+    def handshake_ok(self):
+        self.opened()

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/client/geventclient.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/client/geventclient.py b/ambari-common/src/main/python/ambari_ws4py/client/geventclient.py
new file mode 100644
index 0000000..a1527b0
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/client/geventclient.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+import copy
+
+import gevent
+from gevent import Greenlet
+from gevent.queue import Queue
+
+from ambari_ws4py.client import WebSocketBaseClient
+
+__all__ = ['WebSocketClient']
+
+class WebSocketClient(WebSocketBaseClient):
+    def __init__(self, url, protocols=None, extensions=None, heartbeat_freq=None, ssl_options=None, headers=None):
+        """
+        WebSocket client that executes the
+        :meth:`run() <ambari_ws4py.websocket.WebSocket.run>` into a gevent greenlet.
+
+        .. code-block:: python
+
+          ws = WebSocketClient('ws://localhost:9000/echo', protocols=['http-only', 'chat'])
+          ws.connect()
+
+          ws.send("Hello world")
+
+          def incoming():
+            while True:
+               m = ws.receive()
+               if m is not None:
+                  print str(m)
+               else:
+                  break
+
+          def outgoing():
+            for i in range(0, 40, 5):
+               ws.send("*" * i)
+
+          greenlets = [
+             gevent.spawn(incoming),
+             gevent.spawn(outgoing),
+          ]
+          gevent.joinall(greenlets)
+        """
+        WebSocketBaseClient.__init__(self, url, protocols, extensions, heartbeat_freq,
+                                     ssl_options=ssl_options, headers=headers)
+        self._th = Greenlet(self.run)
+
+        self.messages = Queue()
+        """
+        Queue that will hold received messages.
+        """
+
+    def handshake_ok(self):
+        """
+        Called when the upgrade handshake has completed
+        successfully.
+
+        Starts the client's thread.
+        """
+        self._th.start()
+
+    def received_message(self, message):
+        """
+        Override the base class to store the incoming message
+        in the `messages` queue.
+        """
+        self.messages.put(copy.deepcopy(message))
+
+    def closed(self, code, reason=None):
+        """
+        Puts a :exc:`StopIteration` as a message into the
+        `messages` queue.
+        """
+        # When the connection is closed, put a StopIteration
+        # on the message queue to signal there's nothing left
+        # to wait for
+        self.messages.put(StopIteration)
+
+    def receive(self):
+        """
+        Returns messages that were stored into the
+        `messages` queue and returns `None` when the
+        websocket is terminated or closed.
+        """
+        # If the websocket was terminated and there are no messages
+        # left in the queue, return None immediately otherwise the client
+        # will block forever
+        if self.terminated and self.messages.empty():
+            return None
+        message = self.messages.get()
+        if message is StopIteration:
+            return None
+        return message

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/client/threadedclient.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/client/threadedclient.py b/ambari-common/src/main/python/ambari_ws4py/client/threadedclient.py
new file mode 100644
index 0000000..c6ebbbc
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/client/threadedclient.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+import threading
+
+from ambari_ws4py.client import WebSocketBaseClient
+
+__all__ = ['WebSocketClient']
+
+class WebSocketClient(WebSocketBaseClient):
+    def __init__(self, url, protocols=None, extensions=None, heartbeat_freq=None,
+                 ssl_options=None, headers=None):
+        """
+        .. code-block:: python
+
+           from ambari_ws4py.client.threadedclient import WebSocketClient
+
+           class EchoClient(WebSocketClient):
+               def opened(self):
+                  for i in range(0, 200, 25):
+                     self.send("*" * i)
+
+               def closed(self, code, reason):
+                  print(("Closed down", code, reason))
+
+               def received_message(self, m):
+                  print("=> %d %s" % (len(m), str(m)))
+
+           try:
+               ws = EchoClient('ws://localhost:9000/echo', protocols=['http-only', 'chat'])
+               ws.connect()
+           except KeyboardInterrupt:
+              ws.close()
+
+        """
+        WebSocketBaseClient.__init__(self, url, protocols, extensions, heartbeat_freq,
+                                     ssl_options, headers=headers)
+        self._th = threading.Thread(target=self.run, name='WebSocketClient')
+        self._th.daemon = True
+
+    @property
+    def daemon(self):
+        """
+        `True` if the client's thread is set to be a daemon thread.
+        """
+        return self._th.daemon
+
+    @daemon.setter
+    def daemon(self, flag):
+        """
+        Set to `True` if the client's thread should be a daemon.
+        """
+        self._th.daemon = flag
+
+    def run_forever(self):
+        """
+        Simply blocks the thread until the
+        websocket has terminated.
+        """
+        while not self.terminated:
+            self._th.join(timeout=0.1)
+
+    def handshake_ok(self):
+        """
+        Called when the upgrade handshake has completed
+        successfully.
+
+        Starts the client's thread.
+        """
+        self._th.start()
+
+if __name__ == '__main__':
+    from ambari_ws4py.client.threadedclient import WebSocketClient
+
+    class EchoClient(WebSocketClient):
+        def opened(self):
+            def data_provider():
+                for i in range(0, 200, 25):
+                    yield "#" * i
+
+            self.send(data_provider())
+
+            for i in range(0, 200, 25):
+                self.send("*" * i)
+
+        def closed(self, code, reason):
+            print(("Closed down", code, reason))
+
+        def received_message(self, m):
+            print("#%d" % len(m))
+            if len(m) == 175:
+                self.close(reason='bye bye')
+
+    try:
+        ws = EchoClient('ws://localhost:9000/ws', protocols=['http-only', 'chat'],
+                        headers=[('X-Test', 'hello there')])
+        ws.connect()
+        ws.run_forever()
+    except KeyboardInterrupt:
+        ws.close()

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/client/tornadoclient.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/client/tornadoclient.py b/ambari-common/src/main/python/ambari_ws4py/client/tornadoclient.py
new file mode 100644
index 0000000..b99cc54
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/client/tornadoclient.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+import ssl
+
+from tornado import iostream, escape
+from ambari_ws4py.client import WebSocketBaseClient
+from ambari_ws4py.exc import HandshakeError
+
+__all__ = ['TornadoWebSocketClient']
+
+class TornadoWebSocketClient(WebSocketBaseClient):
+    def __init__(self, url, protocols=None, extensions=None,
+                 io_loop=None, ssl_options=None, headers=None):
+        """
+        .. code-block:: python
+
+            from tornado import ioloop
+
+            class MyClient(TornadoWebSocketClient):
+                def opened(self):
+                    for i in range(0, 200, 25):
+                        self.send("*" * i)
+
+                def received_message(self, m):
+                    print((m, len(str(m))))
+
+                def closed(self, code, reason=None):
+                    ioloop.IOLoop.instance().stop()
+
+            ws = MyClient('ws://localhost:9000/echo', protocols=['http-only', 'chat'])
+            ws.connect()
+
+            ioloop.IOLoop.instance().start()
+        """
+        WebSocketBaseClient.__init__(self, url, protocols, extensions,
+                                     ssl_options=ssl_options, headers=headers)
+        if self.scheme == "wss":
+            self.sock = ssl.wrap_socket(self.sock, do_handshake_on_connect=False, **self.ssl_options)
+            self._is_secure = True
+            self.io = iostream.SSLIOStream(self.sock, io_loop, ssl_options=self.ssl_options)
+        else:
+            self.io = iostream.IOStream(self.sock, io_loop)
+        self.io_loop = io_loop
+
+    def connect(self):
+        """
+        Connects the websocket and initiates the upgrade handshake.
+        """
+        self.io.set_close_callback(self.__connection_refused)
+        self.io.connect((self.host, int(self.port)), self.__send_handshake)
+
+    def _write(self, b):
+        """
+        Trying to prevent a write operation
+        on an already closed websocket stream.
+
+        This cannot be bulletproof but hopefully
+        will catch almost all use cases.
+        """
+        if self.terminated:
+            raise RuntimeError("Cannot send on a terminated websocket")
+
+        self.io.write(b)
+
+    def __connection_refused(self, *args, **kwargs):
+        self.server_terminated = True
+        self.closed(1005, 'Connection refused')
+
+    def __send_handshake(self):
+        self.io.set_close_callback(self.__connection_closed)
+        self.io.write(escape.utf8(self.handshake_request),
+                      self.__handshake_sent)
+
+    def __connection_closed(self, *args, **kwargs):
+        self.server_terminated = True
+        self.closed(1006, 'Connection closed during handshake')
+
+    def __handshake_sent(self):
+        self.io.read_until(b"\r\n\r\n", self.__handshake_completed)
+
+    def __handshake_completed(self, data):
+        self.io.set_close_callback(None)
+        try:
+            response_line, _, headers = data.partition(b'\r\n')
+            self.process_response_line(response_line)
+            protocols, extensions = self.process_handshake_header(headers)
+        except HandshakeError:
+            self.close_connection()
+            raise
+
+        self.opened()
+        self.io.set_close_callback(self.__stream_closed)
+        self.io.read_bytes(self.reading_buffer_size, self.__fetch_more)
+
+    def __fetch_more(self, bytes):
+        try:
+            should_continue = self.process(bytes)
+        except:
+            should_continue = False
+
+        if should_continue:
+            self.io.read_bytes(self.reading_buffer_size, self.__fetch_more)
+        else:
+            self.__gracefully_terminate()
+
+    def __gracefully_terminate(self):
+        self.client_terminated = self.server_terminated = True
+
+        try:
+            if not self.stream.closing:
+                self.closed(1006)
+        finally:
+            self.close_connection()
+
+    def __stream_closed(self, *args, **kwargs):
+        self.io.set_close_callback(None)
+        code = 1006
+        reason = None
+        if self.stream.closing:
+            code, reason = self.stream.closing.code, self.stream.closing.reason
+        self.closed(code, reason)
+        self.stream._cleanup()
+
+    def close_connection(self):
+        """
+        Close the underlying connection
+        """
+        self.io.close()
+
+if __name__ == '__main__':
+    from tornado import ioloop
+
+    class MyClient(TornadoWebSocketClient):
+        def opened(self):
+            def data_provider():
+                for i in range(0, 200, 25):
+                    yield "#" * i
+
+            self.send(data_provider())
+
+            for i in range(0, 200, 25):
+                self.send("*" * i)
+
+        def received_message(self, m):
+            print("#%d" % len(m))
+            if len(m) == 175:
+                self.close()
+
+        def closed(self, code, reason=None):
+            ioloop.IOLoop.instance().stop()
+            print(("Closed down", code, reason))
+
+    ws = MyClient('ws://localhost:9000/ws', protocols=['http-only', 'chat'])
+    ws.connect()
+
+    ioloop.IOLoop.instance().start()

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/compat.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/compat.py b/ambari-common/src/main/python/ambari_ws4py/compat.py
new file mode 100644
index 0000000..e5c299c
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/compat.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+__doc__ = """
+This compatibility module is inspired by the one found
+in CherryPy. It provides a common entry point for the various
+functions and types that are used with ambari_ws4py but which
+differ from Python 2.x to Python 3.x
+
+There are likely better ways for some of them so feel
+free to provide patches.
+
+Note this has been tested against 2.7 and 3.3 only but
+should hopefully work fine with other versions too.
+"""
+import sys
+
+if sys.version_info >= (3, 0):
+    py3k = True
+    from urllib.parse import urlsplit
+    range = range
+    unicode = str
+    basestring = (bytes, str)
+    _ord = ord
+
+    def get_connection(fileobj):
+        return fileobj.raw._sock
+
+    def detach_connection(fileobj):
+        fileobj.detach()
+
+    def ord(c):
+        if isinstance(c, int):
+            return c
+        return _ord(c)
+else:
+    py3k = False
+    from urlparse import urlsplit
+    range = xrange
+    unicode = unicode
+    basestring = basestring
+    ord = ord
+
+    def get_connection(fileobj):
+        return fileobj._sock
+
+    def detach_connection(fileobj):
+        fileobj._sock = None

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/exc.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/exc.py b/ambari-common/src/main/python/ambari_ws4py/exc.py
new file mode 100644
index 0000000..bfefea4
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/exc.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+__all__ = ['WebSocketException', 'FrameTooLargeException', 'ProtocolException',
+           'UnsupportedFrameTypeException', 'TextFrameEncodingException',
+           'UnsupportedFrameTypeException', 'TextFrameEncodingException',
+           'StreamClosed', 'HandshakeError', 'InvalidBytesError']
+
+class WebSocketException(Exception): pass
+
+class ProtocolException(WebSocketException): pass
+
+class FrameTooLargeException(WebSocketException): pass
+
+class UnsupportedFrameTypeException(WebSocketException): pass
+
+class TextFrameEncodingException(WebSocketException): pass
+
+class InvalidBytesError(WebSocketException): pass
+
+class StreamClosed(Exception): pass
+
+class HandshakeError(WebSocketException):
+    def __init__(self, msg):
+        self.msg = msg
+
+    def __str__(self):
+        return self.msg

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/framing.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/framing.py b/ambari-common/src/main/python/ambari_ws4py/framing.py
new file mode 100644
index 0000000..a7f62c8
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/framing.py
@@ -0,0 +1,273 @@
+# -*- coding: utf-8 -*-
+from struct import pack, unpack
+
+from ambari_ws4py.exc import FrameTooLargeException, ProtocolException
+from ambari_ws4py.compat import py3k, ord, range
+
+# Frame opcodes defined in the spec.
+OPCODE_CONTINUATION = 0x0
+OPCODE_TEXT = 0x1
+OPCODE_BINARY = 0x2
+OPCODE_CLOSE = 0x8
+OPCODE_PING = 0x9
+OPCODE_PONG = 0xa
+
+__all__ = ['Frame']
+
+class Frame(object):
+    def __init__(self, opcode=None, body=b'', masking_key=None, fin=0, rsv1=0, rsv2=0, rsv3=0):
+        """
+        Implements the framing protocol as defined by RFC 6455.
+
+        .. code-block:: python
+           :linenos:
+
+           >>> test_mask = 'XXXXXX' # perhaps from os.urandom(4)
+           >>> f = Frame(OPCODE_TEXT, 'hello world', masking_key=test_mask, fin=1)
+           >>> bytes = f.build()
+           >>> bytes.encode('hex')
+           '818bbe04e66ad6618a06d1249105cc6882'
+           >>> f = Frame()
+           >>> f.parser.send(bytes[0])
+           1
+           >>> f.parser.send(bytes[1])
+           4
+
+        .. seealso:: Data Framing http://tools.ietf.org/html/rfc6455#section-5.2
+        """
+        if not isinstance(body, bytes):
+            raise TypeError("The body must be properly encoded")
+
+        self.opcode = opcode
+        self.body = body
+        self.masking_key = masking_key
+        self.fin = fin
+        self.rsv1 = rsv1
+        self.rsv2 = rsv2
+        self.rsv3 = rsv3
+        self.payload_length = len(body)
+
+        self._parser = None
+
+    @property
+    def parser(self):
+        if self._parser is None:
+            self._parser = self._parsing()
+            # Python generators must be initialized once.
+            next(self.parser)
+        return self._parser
+
+    def _cleanup(self):
+        if self._parser:
+            self._parser.close()
+            self._parser = None
+
+    def build(self):
+        """
+        Builds a frame from the instance's attributes and returns
+        its bytes representation.
+        """
+        header = b''
+
+        if self.fin > 0x1:
+            raise ValueError('FIN bit parameter must be 0 or 1')
+
+        if 0x3 <= self.opcode <= 0x7 or 0xB <= self.opcode:
+            raise ValueError('Opcode cannot be a reserved opcode')
+
+        ## +-+-+-+-+-------+
+        ## |F|R|R|R| opcode|
+        ## |I|S|S|S|  (4)  |
+        ## |N|V|V|V|       |
+        ## | |1|2|3|       |
+        ## +-+-+-+-+-------+
+        header = pack('!B', ((self.fin << 7)
+                             | (self.rsv1 << 6)
+                             | (self.rsv2 << 5)
+                             | (self.rsv3 << 4)
+                             | self.opcode))
+
+        ##                 +-+-------------+-------------------------------+
+        ##                 |M| Payload len |    Extended payload length    |
+        ##                 |A|     (7)     |             (16/63)           |
+        ##                 |S|             |   (if payload len==126/127)   |
+        ##                 |K|             |                               |
+        ## +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +
+        ## |     Extended payload length continued, if payload len == 127  |
+        ## + - - - - - - - - - - - - - - - +-------------------------------+
+        if self.masking_key: mask_bit = 1 << 7
+        else: mask_bit = 0
+
+        length = self.payload_length
+        if length < 126:
+            header += pack('!B', (mask_bit | length))
+        elif length < (1 << 16):
+            header += pack('!B', (mask_bit | 126)) + pack('!H', length)
+        elif length < (1 << 63):
+            header += pack('!B', (mask_bit | 127)) + pack('!Q', length)
+        else:
+            raise FrameTooLargeException()
+        
+        ## + - - - - - - - - - - - - - - - +-------------------------------+
+        ## |                               |Masking-key, if MASK set to 1  |
+        ## +-------------------------------+-------------------------------+
+        ## | Masking-key (continued)       |          Payload Data         |
+        ## +-------------------------------- - - - - - - - - - - - - - - - +
+        ## :                     Payload Data continued ...                :
+        ## + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
+        ## |                     Payload Data continued ...                |
+        ## +---------------------------------------------------------------+
+        body = self.body
+        if not self.masking_key:
+            return bytes(header + body)
+
+        return bytes(header + self.masking_key + self.mask(body))
+
+    def _parsing(self):
+        """
+        Generator to parse bytes into a frame. Yields until
+        enough bytes have been read or an error is met.
+        """
+        buf = b''
+        some_bytes = b''
+
+        # yield until we get the first header's byte
+        while not some_bytes:
+            some_bytes = (yield 1)
+
+        first_byte = some_bytes[0] if isinstance(some_bytes, bytearray) else ord(some_bytes[0])
+        # frame-fin = %x0 ; more frames of this message follow
+        #           / %x1 ; final frame of this message
+        self.fin = (first_byte >> 7) & 1
+        self.rsv1 = (first_byte >> 6) & 1
+        self.rsv2 = (first_byte >> 5) & 1
+        self.rsv3 = (first_byte >> 4) & 1
+        self.opcode = first_byte & 0xf
+
+        # frame-rsv1 = %x0 ; 1 bit, MUST be 0 unless negotiated otherwise
+        # frame-rsv2 = %x0 ; 1 bit, MUST be 0 unless negotiated otherwise
+        # frame-rsv3 = %x0 ; 1 bit, MUST be 0 unless negotiated otherwise
+        if self.rsv1 or self.rsv2 or self.rsv3:
+            raise ProtocolException()
+
+        # control frames between 3 and 7 as well as above 0xA are currently reserved
+        if 2 < self.opcode < 8 or self.opcode > 0xA:
+            raise ProtocolException()
+
+        # control frames cannot be fragmented
+        if self.opcode > 0x7 and self.fin == 0:
+            raise ProtocolException()
+
+        # do we already have enough some_bytes to continue?
+        some_bytes = some_bytes[1:] if some_bytes and len(some_bytes) > 1 else b''
+
+        # Yield until we get the second header's byte
+        while not some_bytes:
+            some_bytes = (yield 1)
+
+        second_byte = some_bytes[0] if isinstance(some_bytes, bytearray) else ord(some_bytes[0])
+        mask = (second_byte >> 7) & 1
+        self.payload_length = second_byte & 0x7f
+
+        # All control frames MUST have a payload length of 125 some_bytes or less
+        if self.opcode > 0x7 and self.payload_length > 125:
+            raise FrameTooLargeException()
+
+        if some_bytes and len(some_bytes) > 1:
+            buf = some_bytes[1:]
+            some_bytes = buf
+        else:
+            buf = b''
+            some_bytes = b''
+
+        if self.payload_length == 127:
+            # This will compute the actual application data size
+            if len(buf) < 8:
+                nxt_buf_size = 8 - len(buf)
+                some_bytes = (yield nxt_buf_size)
+                some_bytes = buf + (some_bytes or b'')
+                while len(some_bytes) < 8:
+                    b = (yield 8 - len(some_bytes))
+                    if b is not None:
+                        some_bytes = some_bytes + b
+                if len(some_bytes) > 8:
+                    buf = some_bytes[8:]
+                    some_bytes = some_bytes[:8]
+            else:
+                some_bytes = buf[:8]
+                buf = buf[8:]
+            extended_payload_length = some_bytes
+            self.payload_length = unpack(
+                '!Q', extended_payload_length)[0]
+            if self.payload_length > 0x7FFFFFFFFFFFFFFF:
+                raise FrameTooLargeException()
+        elif self.payload_length == 126:
+            if len(buf) < 2:
+                nxt_buf_size = 2 - len(buf)
+                some_bytes = (yield nxt_buf_size)
+                some_bytes = buf + (some_bytes or b'')
+                while len(some_bytes) < 2:
+                    b = (yield 2 - len(some_bytes))
+                    if b is not None:
+                        some_bytes = some_bytes + b
+                if len(some_bytes) > 2:
+                    buf = some_bytes[2:]
+                    some_bytes = some_bytes[:2]
+            else:
+                some_bytes = buf[:2]
+                buf = buf[2:]
+            extended_payload_length = some_bytes
+            self.payload_length = unpack(
+                '!H', extended_payload_length)[0]
+
+        if mask:
+            if len(buf) < 4:
+                nxt_buf_size = 4 - len(buf)
+                some_bytes = (yield nxt_buf_size)
+                some_bytes = buf + (some_bytes or b'')
+                while not some_bytes or len(some_bytes) < 4:
+                    b = (yield 4 - len(some_bytes))
+                    if b is not None:
+                        some_bytes = some_bytes + b
+                if len(some_bytes) > 4:
+                    buf = some_bytes[4:]
+            else:
+                some_bytes = buf[:4]
+                buf = buf[4:]
+            self.masking_key = some_bytes
+
+        if len(buf) < self.payload_length:
+            nxt_buf_size = self.payload_length - len(buf)
+            some_bytes = (yield nxt_buf_size)
+            some_bytes = buf + (some_bytes or b'')
+            while len(some_bytes) < self.payload_length:
+                l = self.payload_length - len(some_bytes)
+                b = (yield l)
+                if b is not None:
+                    some_bytes = some_bytes + b
+        else:
+            if self.payload_length == len(buf):
+                some_bytes = buf
+            else:
+                some_bytes = buf[:self.payload_length]
+
+        self.body = some_bytes
+        yield
+
+    def mask(self, data):
+        """
+        Performs the masking or unmasking operation on data
+        using the simple masking algorithm:
+
+        ..
+           j                   = i MOD 4
+           transformed-octet-i = original-octet-i XOR masking-key-octet-j
+
+        """
+        masked = bytearray(data)
+        if py3k: key = self.masking_key
+        else: key = map(ord, self.masking_key)
+        for i in range(len(data)):
+            masked[i] = masked[i] ^ key[i%4]
+        return masked
+    unmask = mask

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/manager.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/manager.py b/ambari-common/src/main/python/ambari_ws4py/manager.py
new file mode 100644
index 0000000..23fd8e1
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/manager.py
@@ -0,0 +1,368 @@
+# -*- coding: utf-8 -*-
+__doc__ = """
+The manager module provides a selection of classes to
+handle websocket's execution.
+
+Initially the rationale was to:
+
+- Externalize the way the CherryPy server had been setup
+  as its websocket management was too tightly coupled with
+  the plugin implementation.
+- Offer a management that could be used by other
+  server or client implementations.
+- Move away from the threaded model to the event-based
+  model by relying on `select` or `epoll` (when available).
+
+
+A simple usage for handling websocket clients:
+
+.. code-block:: python
+
+    from ambari_ws4py.client import WebSocketBaseClient
+    from ambari_ws4py.manager import WebSocketManager
+
+    m = WebSocketManager()
+
+    class EchoClient(WebSocketBaseClient):
+        def handshake_ok(self):
+            m.add(self)  # register the client once the handshake is done
+
+        def received_message(self, msg):
+            print str(msg)
+
+    m.start()
+
+    client = EchoClient('ws://localhost:9000/ws')
+    client.connect()
+
+    m.join()  # blocks forever
+
+Managers are not compulsory but hopefully will help your
+workflow. For clients, you can still rely on threaded, gevent or
+tornado based implementations of course.
+"""
+import logging
+import select
+import threading
+import time
+
+from ambari_ws4py import format_addresses
+from ambari_ws4py.compat import py3k
+
+logger = logging.getLogger('ambari_ws4py')
+
+class SelectPoller(object):
+    def __init__(self, timeout=0.1):
+        """
+        A socket poller that uses the `select`
+        implementation to determines which
+        file descriptors have data available to read.
+
+        It is available on all platforms.
+        """
+        self._fds = []
+        self.timeout = timeout
+
+    def release(self):
+        """
+        Cleanup resources.
+        """
+        self._fds = []
+
+    def register(self, fd):
+        """
+        Register a new file descriptor to be
+        part of the select polling next time around.
+        """
+        if fd not in self._fds:
+            self._fds.append(fd)
+
+    def unregister(self, fd):
+        """
+        Unregister the given file descriptor.
+        """
+        if fd in self._fds:
+            self._fds.remove(fd)
+
+    def poll(self):
+        """
+        Polls once and returns a list of
+        ready-to-be-read file descriptors.
+        """
+        if not self._fds:
+            time.sleep(self.timeout)
+            return []
+        try:
+            r, w, x = select.select(self._fds, [], [], self.timeout)
+        except IOError as e:
+            return []
+        return r
+
+class EPollPoller(object):
+    def __init__(self, timeout=0.1):
+        """
+        An epoll poller that uses the ``epoll``
+        implementation to determine which
+        file descriptors have data available to read.
+
+        Available on Unix flavors mostly.
+        """
+        self.poller = select.epoll()
+        self.timeout = timeout
+
+    def release(self):
+        """
+        Cleanup resources.
+        """
+        self.poller.close()
+
+    def register(self, fd):
+        """
+        Register a new file descriptor to be
+        part of the select polling next time around.
+        """
+        try:
+            self.poller.register(fd, select.EPOLLIN | select.EPOLLPRI)
+        except IOError:
+            pass
+
+    def unregister(self, fd):
+        """
+        Unregister the given file descriptor.
+        """
+        self.poller.unregister(fd)
+
+    def poll(self):
+        """
+        Polls once and yields each ready-to-be-read
+        file-descriptor
+        """
+        try:
+            events = self.poller.poll(timeout=self.timeout)
+        except IOError:
+            events = []
+
+        for fd, event in events:
+            if event | select.EPOLLIN | select.EPOLLPRI:
+                yield fd
+
+class KQueuePoller(object):
+    def __init__(self, timeout=0.1):
+        """
+        Despite its name, this poller currently uses the
+        ``epoll`` implementation to determine which
+        file descriptors have data available to read
+        (it duplicates EPollPoller rather than using kqueue).
+        Available on Unix flavors mostly.
+        """
+        self.poller = select.epoll()
+        self.timeout = timeout
+
+    def release(self):
+        """
+        Cleanup resources.
+        """
+        self.poller.close()
+
+    def register(self, fd):
+        """
+        Register a new file descriptor to be
+        part of the select polling next time around.
+        """
+        try:
+            self.poller.register(fd, select.EPOLLIN | select.EPOLLPRI)
+        except IOError:
+            pass
+
+    def unregister(self, fd):
+        """
+        Unregister the given file descriptor.
+        """
+        self.poller.unregister(fd)
+
+    def poll(self):
+        """
+        Polls once and yields each ready-to-be-read
+        file-descriptor
+        """
+        try:
+            events = self.poller.poll(timeout=self.timeout)
+        except IOError:
+            events = []
+        for fd, event in events:
+            if event | select.EPOLLIN | select.EPOLLPRI:
+                yield fd
+
+class WebSocketManager(threading.Thread):
+    def __init__(self, poller=None):
+        """
+        An event-based websocket manager. By event-based, we mean
+        that the websockets will be called when their
+        sockets have data to be read from.
+
+        The manager itself runs in its own thread so as not to
+        be the blocking mainloop of your application.
+
+        The poller's implementation is automatically chosen
+        with ``epoll`` if available else ``select`` unless you
+        provide your own ``poller``.
+        """
+        threading.Thread.__init__(self)
+        self.name = "WebSocketManager"
+        self.lock = threading.Lock()
+        self.websockets = {}
+        self.running = False
+
+        if poller:
+            self.poller = poller
+        else:
+            if hasattr(select, "epoll"):
+                self.poller = EPollPoller()
+                logger.info("Using epoll")
+            else:
+                self.poller = SelectPoller()
+                logger.info("Using select as epoll is not available")
+
+    def __len__(self):
+        return len(self.websockets)
+
+    def __iter__(self):
+        if py3k:
+            return iter(self.websockets.values())
+        else:
+            return self.websockets.itervalues()
+
+    def __contains__(self, ws):
+        fd = ws.sock.fileno()
+        # just in case the file descriptor was reused
+        # we actually check the instance (well, this might
+        # also have been reused...)
+        return self.websockets.get(fd) is ws
+
+    def add(self, websocket):
+        """
+        Manage a new websocket.
+
+        First calls its :meth:`opened() <ambari_ws4py.websocket.WebSocket.opened>`
+        method and register its socket against the poller
+        for reading events.
+        """
+        if websocket in self:
+            return
+
+        logger.info("Managing websocket %s" % format_addresses(websocket))
+        websocket.opened()
+        with self.lock:
+            fd = websocket.sock.fileno()
+            self.websockets[fd] = websocket
+            self.poller.register(fd)
+
+    def remove(self, websocket):
+        """
+        Remove the given ``websocket`` from the manager.
+
+        This does not call its :meth:`closed() <ambari_ws4py.websocket.WebSocket.closed>`
+        method as it's out-of-band by your application
+        or from within the manager's run loop.
+        """
+        if websocket not in self:
+            return
+
+        logger.info("Removing websocket %s" % format_addresses(websocket))
+        with self.lock:
+            fd = websocket.sock.fileno()
+            self.websockets.pop(fd, None)
+            self.poller.unregister(fd)
+
+    def stop(self):
+        """
+        Mark the manager as terminated and
+        releases its resources.
+        """
+        self.running = False
+        with self.lock:
+            self.websockets.clear()
+            self.poller.release()
+
+    def run(self):
+        """
+        Manager's mainloop executed from within a thread.
+
+        Constantly poll for read events and, when available,
+        call related websockets' `once` method to
+        read and process the incoming data.
+
+        If the :meth:`once() <ambari_ws4py.websocket.WebSocket.once>`
+        method returns a `False` value, its :meth:`terminate() <ambari_ws4py.websocket.WebSocket.terminate>`
+        method is also applied to properly close
+        the websocket and its socket is unregistered from the poller.
+
+        Note that websocket shouldn't take long to process
+        their data or they will block the remaining
+        websockets with data to be handled. As for what long means,
+        it's up to your requirements.
+        """
+        self.running = True
+        while self.running:
+            with self.lock:
+                polled = self.poller.poll()
+            if not self.running:
+                break
+
+            for fd in polled:
+                if not self.running:
+                    break
+
+                ws = self.websockets.get(fd)
+                if ws and not ws.terminated:
+                    # I don't know what kind of errors might spew out of here
+                    # but they probably shouldn't crash the entire server.
+                    try:
+                        x = ws.once()
+                    # Treat the error as if once() had returned None
+                    except Exception as e:
+                        x = None
+                        logger.error("Terminating websocket %s due to exception: %s in once method" % (format_addresses(ws), repr(e)) )
+                    if not x:
+                        with self.lock:
+                            self.websockets.pop(fd, None)
+                            self.poller.unregister(fd)
+
+                        if not ws.terminated:
+                            logger.info("Terminating websocket %s" % format_addresses(ws))
+                            ws.terminate()
+
+
+    def close_all(self, code=1001, message='Server is shutting down'):
+        """
+        Execute the :meth:`close() <ambari_ws4py.websocket.WebSocket.close>`
+        method of each registered websockets to initiate the closing handshake.
+        It doesn't wait for the handshake to complete properly.
+        """
+        with self.lock:
+            logger.info("Closing all websockets with [%d] '%s'" % (code, message))
+            for ws in iter(self):
+                ws.close(code=code, reason=message)
+
+    def broadcast(self, message, binary=False):
+        """
+        Broadcasts the given message to all registered
+        websockets, at the time of the call.
+
+        Broadcast may fail on a given registered peer
+        but this is silent as it's not the method's
+        purpose to handle websocket's failures.
+        """
+        with self.lock:
+            websockets = self.websockets.copy()
+            if py3k:
+                ws_iter = iter(websockets.values())
+            else:
+                ws_iter = websockets.itervalues()
+
+        for ws in ws_iter:
+            if not ws.terminated:
+                try:
+                    ws.send(message, binary)
+                except:
+                    pass

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/messaging.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/messaging.py b/ambari-common/src/main/python/ambari_ws4py/messaging.py
new file mode 100644
index 0000000..d94ee6e
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/messaging.py
@@ -0,0 +1,169 @@
+# -*- coding: utf-8 -*-
+import os
+import struct
+
+from ambari_ws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \
+     OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
+from ambari_ws4py.compat import unicode, py3k
+
+__all__ = ['Message', 'TextMessage', 'BinaryMessage', 'CloseControlMessage',
+           'PingControlMessage', 'PongControlMessage']
+
+class Message(object):
+    def __init__(self, opcode, data=b'', encoding='utf-8'):
+        """
+        A message is an application level entity. It's usually built
+        from one or many frames. The protocol defines several kinds
+        of messages which are grouped into two sets:
+
+        * data messages which can be text or binary typed
+        * control messages which provide a mechanism to perform
+          in-band control communication between peers
+
+        The ``opcode`` indicates the message type and ``data`` is
+        the possible message payload.
+
+        The payload is held internally as :func:`bytes`; ``bytearray``
+        input is converted on the way in.
+
+        Unicode data will be encoded using the provided ``encoding``.
+        """
+        self.opcode = opcode
+        self._completed = False
+        self.encoding = encoding
+
+        if isinstance(data, unicode):
+            if not encoding:
+                raise TypeError("unicode data without an encoding")
+            data = data.encode(encoding)
+        elif isinstance(data, bytearray):
+            data = bytes(data)
+        elif not isinstance(data, bytes):
+            raise TypeError("%s is not a supported data type" % type(data))
+
+        self.data = data
+
+    def single(self, mask=False):
+        """
+        Returns a frame bytes with the fin bit set and a random mask.
+
+        If ``mask`` is set, automatically mask the frame
+        using a generated 4-byte token.
+        """
+        mask = os.urandom(4) if mask else None
+        return Frame(body=self.data, opcode=self.opcode,
+                     masking_key=mask, fin=1).build()
+
+    def fragment(self, first=False, last=False, mask=False):
+        """
+        Returns a :class:`ambari_ws4py.framing.Frame` bytes.
+
+        The behavior depends on the given flags:
+
+        * ``first``: the frame uses ``self.opcode`` else a continuation opcode
+        * ``last``: the frame has its ``fin`` bit set
+        * ``mask``: the frame is masked using a automatically generated 4-byte token
+        """
+        fin = 1 if last is True else 0
+        opcode = self.opcode if first is True else OPCODE_CONTINUATION
+        mask = os.urandom(4) if mask else None
+        return Frame(body=self.data,
+                     opcode=opcode, masking_key=mask,
+                     fin=fin).build()
+
+    @property
+    def completed(self):
+        """
+        Indicates that the message is complete, meaning
+        the frame's ``fin`` bit was set.
+        """
+        return self._completed
+
+    @completed.setter
+    def completed(self, state):
+        """
+        Sets the state for this message. Usually
+        set by the stream's parser.
+        """
+        self._completed = state
+
+    def extend(self, data):
+        """
+        Add more ``data`` to the message.
+        """
+        if isinstance(data, bytes):
+            self.data += data
+        elif isinstance(data, bytearray):
+            self.data += bytes(data)
+        elif isinstance(data, unicode):
+            self.data += data.encode(self.encoding)
+        else:
+            raise TypeError("%s is not a supported data type" % type(data))
+
+    def __len__(self):
+        # NOTE(review): length of the *decoded* text, not the raw byte
+        # count — assumes the payload decodes with ``self.encoding``.
+        # BinaryMessage overrides this to return the byte length.
+        return len(self.__unicode__())
+
+    def __str__(self):
+        # On py3 this returns the decoded text; on py2 it returns the
+        # raw bytes (py2 ``str``).
+        if py3k:
+            return self.data.decode(self.encoding)
+        return self.data
+
+    def __unicode__(self):
+        return self.data.decode(self.encoding)
+
+class TextMessage(Message):
+    def __init__(self, text=None):
+        Message.__init__(self, OPCODE_TEXT, text)
+
+    @property
+    def is_binary(self):
+        return False
+
+    @property
+    def is_text(self):
+        return True
+
+class BinaryMessage(Message):
+    def __init__(self, bytes=None):
+        Message.__init__(self, OPCODE_BINARY, bytes, encoding=None)
+
+    @property
+    def is_binary(self):
+        return True
+
+    @property
+    def is_text(self):
+        return False
+
+    def __len__(self):
+        return len(self.data)
+
+class CloseControlMessage(Message):
+    def __init__(self, code=1000, reason=''):
+        data = b""
+        if code:
+            data += struct.pack("!H", code)
+        if reason is not None:
+            if isinstance(reason, unicode):
+                reason = reason.encode('utf-8')
+            data += reason
+
+        Message.__init__(self, OPCODE_CLOSE, data, 'utf-8')
+        self.code = code
+        self.reason = reason
+
+    def __str__(self):
+        if py3k:
+            return self.reason.decode('utf-8')
+        return self.reason
+
+    def __unicode__(self):
+        return self.reason.decode(self.encoding)
+
+class PingControlMessage(Message):
+    def __init__(self, data=None):
+        Message.__init__(self, OPCODE_PING, data)
+
+class PongControlMessage(Message):
+    def __init__(self, data):
+        Message.__init__(self, OPCODE_PONG, data)

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/streaming.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/streaming.py b/ambari-common/src/main/python/ambari_ws4py/streaming.py
new file mode 100644
index 0000000..61063ae
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/streaming.py
@@ -0,0 +1,319 @@
+# -*- coding: utf-8 -*-
+import struct
+from struct import unpack
+
+from ambari_ws4py.utf8validator import Utf8Validator
+from ambari_ws4py.messaging import TextMessage, BinaryMessage, CloseControlMessage,\
+     PingControlMessage, PongControlMessage
+from ambari_ws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \
+     OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
+from ambari_ws4py.exc import FrameTooLargeException, ProtocolException, InvalidBytesError,\
+     TextFrameEncodingException, UnsupportedFrameTypeException, StreamClosed
+from ambari_ws4py.compat import py3k
+
+VALID_CLOSING_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011]
+
+class Stream(object):
+    def __init__(self, always_mask=False, expect_masking=True):
+        """ Represents a websocket stream of bytes flowing in and out.
+
+        The stream doesn't know about the data provider itself and
+        doesn't even know about sockets. Instead the stream simply
+        yields for more bytes whenever it requires them. The stream owner
+        is responsible to provide the stream with those bytes until
+        a frame can be interpreted.
+
+        .. code-block:: python
+           :linenos:
+
+           >>> s = Stream()
+           >>> s.parser.send(BYTES)
+           >>> s.has_message
+           False
+           >>> s.parser.send(MORE_BYTES)
+           >>> s.has_message
+           True
+           >>> s.message
+           <TextMessage ... >
+
+        Set ``always_mask`` to mask all frames built.
+
+        Set ``expect_masking`` to indicate masking will be
+        checked on all parsed frames.
+        """
+
+        self.message = None
+        """
+        Parsed test or binary messages. Whenever the parser
+        reads more bytes from a fragment message, those bytes
+        are appended to the most recent message.
+        """
+
+        self.pings = []
+        """
+        Parsed ping control messages. They are instances of
+        :class:`ambari_ws4py.messaging.PingControlMessage`
+        """
+
+        self.pongs = []
+        """
+        Parsed pong control messages. They are instances of
+        :class:`ambari_ws4py.messaging.PongControlMessage`
+        """
+
+        self.closing = None
+        """
+        Parsed close control messsage. Instance of
+        :class:`ambari_ws4py.messaging.CloseControlMessage`
+        """
+
+        self.errors = []
+        """
+        Detected errors while parsing. Instances of
+        :class:`ambari_ws4py.messaging.CloseControlMessage`
+        """
+
+        self._parser = None
+        """
+        Parser in charge to process bytes it is fed with.
+        """
+
+        self.always_mask = always_mask
+        self.expect_masking = expect_masking
+
+    @property
+    def parser(self):
+        """
+        Lazily-created :meth:`receiver` generator, primed once so it
+        is ready to accept ``send()`` calls.
+        """
+        if self._parser is None:
+            self._parser = self.receiver()
+            # Python generators must be initialized once.
+            next(self.parser)
+        return self._parser
+
+    def _cleanup(self):
+        """
+        Frees the stream's resources rendering it unusable.
+        """
+        self.message = None
+        if self._parser is not None:
+            if not self._parser.gi_running:
+                self._parser.close()
+            self._parser = None
+        self.errors = None
+        self.pings = None
+        self.pongs = None
+        self.closing = None
+
+    def text_message(self, text):
+        """
+        Returns a :class:`ambari_ws4py.messaging.TextMessage` instance
+        ready to be built. Convenience method so
+        that the caller doesn't need to import the
+        :class:`ambari_ws4py.messaging.TextMessage` class itself.
+        """
+        return TextMessage(text=text)
+
+    def binary_message(self, bytes):
+        """
+        Returns a :class:`ambari_ws4py.messaging.BinaryMessage` instance
+        ready to be built. Convenience method so
+        that the caller doesn't need to import the
+        :class:`ambari_ws4py.messaging.BinaryMessage` class itself.
+        """
+        return BinaryMessage(bytes)
+
+    @property
+    def has_message(self):
+        """
+        Checks if the stream has received any message
+        which, if fragmented, is now completed.
+        """
+        if self.message is not None:
+            return self.message.completed
+
+        return False
+
+    def close(self, code=1000, reason=''):
+        """
+        Returns a close control message built from
+        a :class:`ambari_ws4py.messaging.CloseControlMessage` instance,
+        using the given status ``code`` and ``reason`` message.
+        """
+        return CloseControlMessage(code=code, reason=reason)
+
+    def ping(self, data=''):
+        """
+        Returns the bytes of a single ping control frame built from
+        a :class:`ambari_ws4py.messaging.PingControlMessage` instance.
+        """
+        return PingControlMessage(data).single(mask=self.always_mask)
+
+    def pong(self, data=''):
+        """
+        Returns the bytes of a single pong control frame built from
+        a :class:`ambari_ws4py.messaging.PongControlMessage` instance.
+        """
+        return PongControlMessage(data).single(mask=self.always_mask)
+
+    def receiver(self):
+        """
+        Parser that keeps trying to interpret bytes it is fed with as
+        incoming frames part of a message.
+
+        Control messages are single frames only while data messages, like text
+        and binary, may be fragmented across frames.
+
+        The way it works is by instantiating a :class:`wspy.framing.Frame` object,
+        then running its parser generator which yields how much bytes
+        it requires to perform its task. The stream parser yields this value
+        to its caller and feeds the frame parser.
+
+        When the frame parser raises :exc:`StopIteration`, the stream parser
+        tries to make sense of the parsed frame. It dispatches the frame's bytes
+        to the most appropriate message type based on the frame's opcode.
+
+        Overall this makes the stream parser totally agnostic to
+        the data provider.
+        """
+        utf8validator = Utf8Validator()
+        running = True
+        frame = None
+        while running:
+            frame = Frame()
+            while 1:
+                try:
+                    some_bytes = (yield next(frame.parser))
+                    frame.parser.send(some_bytes)
+                except GeneratorExit:
+                    running = False
+                    break
+                except StopIteration:
+                    # The frame parser is done: the frame is fully parsed
+                    # and its payload can now be interpreted.
+                    frame._cleanup()
+                    some_bytes = frame.body
+
+                    # Let's avoid unmasking when there is no payload
+                    if some_bytes:
+                        if frame.masking_key and self.expect_masking:
+                            some_bytes = frame.unmask(some_bytes)
+                        elif not frame.masking_key and self.expect_masking:
+                            msg = CloseControlMessage(code=1002, reason='Missing masking when expected')
+                            self.errors.append(msg)
+                            break
+                        elif frame.masking_key and not self.expect_masking:
+                            msg = CloseControlMessage(code=1002, reason='Masked when not expected')
+                            self.errors.append(msg)
+                            break
+                        else:
+                            # If we reach this stage, it's because
+                            # the frame wasn't masked and we didn't expect
+                            # it anyway. Therefore, on py2k, the bytes
+                            # are actually a str object and can't be used
+                            # in the utf8 validator as we need integers
+                            # when we get each byte one by one.
+                            # Our only solution here is to convert our
+                            # string to a bytearray.
+                            some_bytes = bytearray(some_bytes)
+
+                    if frame.opcode == OPCODE_TEXT:
+                        if self.message and not self.message.completed:
+                            # We got a text frame before we completed the previous one
+                            msg = CloseControlMessage(code=1002, reason='Received a new message before completing previous')
+                            self.errors.append(msg)
+                            break
+
+                        m = TextMessage(some_bytes)
+                        m.completed = (frame.fin == 1)
+                        self.message = m
+
+                        if some_bytes:
+                            is_valid, end_on_code_point, _, _ = utf8validator.validate(some_bytes)
+
+                            # A completed text message must also end on a
+                            # UTF-8 code point boundary.
+                            if not is_valid or (m.completed and not end_on_code_point):
+                                self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
+                                break
+
+                    elif frame.opcode == OPCODE_BINARY:
+                        if self.message and not self.message.completed:
+                            # We got a text frame before we completed the previous one
+                            msg = CloseControlMessage(code=1002, reason='Received a new message before completing previous')
+                            self.errors.append(msg)
+                            break
+
+                        m = BinaryMessage(some_bytes)
+                        m.completed = (frame.fin == 1)
+                        self.message = m
+
+                    elif frame.opcode == OPCODE_CONTINUATION:
+                        m = self.message
+                        if m is None:
+                            self.errors.append(CloseControlMessage(code=1002, reason='Message not started yet'))
+                            break
+
+                        m.extend(some_bytes)
+                        m.completed = (frame.fin == 1)
+                        if m.opcode == OPCODE_TEXT:
+                            if some_bytes:
+                                is_valid, end_on_code_point, _, _ = utf8validator.validate(some_bytes)
+
+                                if not is_valid or (m.completed and not end_on_code_point):
+                                    self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
+                                    break
+
+                    elif frame.opcode == OPCODE_CLOSE:
+                        # 1005 is the "no status code present" default
+                        # (RFC 6455 section 7.4.1).
+                        code = 1005
+                        reason = ""
+                        if frame.payload_length == 0:
+                            self.closing = CloseControlMessage(code=1005)
+                        elif frame.payload_length == 1:
+                            self.closing = CloseControlMessage(code=1005, reason='Payload has invalid length')
+                        else:
+                            try:
+                                # at this stage, some_bytes have been unmasked
+                                # so actually are held in a bytearray
+                                code = int(unpack("!H", bytes(some_bytes[0:2]))[0])
+                            except struct.error:
+                                reason = 'Failed at decoding closing code'
+                            else:
+                                # Those codes are reserved or plainly forbidden
+                                if code not in VALID_CLOSING_CODES and not (2999 < code < 5000):
+                                    reason = 'Invalid Closing Frame Code: %d' % code
+                                    code = 1005
+                                elif frame.payload_length > 1:
+                                    reason = some_bytes[2:] if frame.masking_key else frame.body[2:]
+
+                                    if not py3k: reason = bytearray(reason)
+                                    is_valid, end_on_code_point, _, _ = utf8validator.validate(reason)
+                                    if not is_valid or not end_on_code_point:
+                                        self.errors.append(CloseControlMessage(code=1007, reason='Invalid UTF-8 bytes'))
+                                        break
+                                    reason = bytes(reason)
+                            self.closing = CloseControlMessage(code=code, reason=reason)
+
+                    elif frame.opcode == OPCODE_PING:
+                        self.pings.append(PingControlMessage(some_bytes))
+
+                    elif frame.opcode == OPCODE_PONG:
+                        self.pongs.append(PongControlMessage(some_bytes))
+
+                    else:
+                        # Unknown opcode: 1003 signals an unacceptable data type.
+                        self.errors.append(CloseControlMessage(code=1003))
+
+                    # Frame fully consumed; exit to build a fresh Frame.
+                    break
+
+                except ProtocolException:
+                    self.errors.append(CloseControlMessage(code=1002))
+                    break
+                except FrameTooLargeException:
+                    self.errors.append(CloseControlMessage(code=1002, reason="Frame was too large"))
+                    break
+
+            frame._cleanup()
+            frame.body = None
+            frame = None
+
+            # A completed message ends a UTF-8 validation unit; reset the
+            # validator before the next message starts.
+            if self.message is not None and self.message.completed:
+                utf8validator.reset()
+
+        utf8validator.reset()
+        utf8validator = None
+
+        self._cleanup()


[26/41] ambari git commit: AMBARI-20659.Notification widgets position have been fixed and size have been reduced.(Venkata Sairam via padmapriyanitt)

Posted by ao...@apache.org.
AMBARI-20659. Notification widget positions have been fixed and their size has been reduced. (Venkata Sairam via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0472e5f1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0472e5f1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0472e5f1

Branch: refs/heads/branch-3.0-perf
Commit: 0472e5f1ecf59ea5320b3544c259b95642548c59
Parents: 27105de
Author: padmapriyanitt <pa...@gmail.com>
Authored: Tue Apr 4 15:09:30 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../hive20/src/main/resources/ui/app/styles/app.scss  | 14 ++++++++++++--
 .../ui/app/templates/components/alert-message.hbs     |  6 +++---
 2 files changed, 15 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0472e5f1/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index e083922..650c1d6 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -681,7 +681,7 @@ pre {
 }
 
 .flash-messages-wrap {
-  position: absolute;
+  position: fixed;
   width: 40%;
   min-width: 575px;
   left: 50%;
@@ -693,12 +693,16 @@ pre {
   border-radius: $border-radius-large;
   .alert-icon {
     float: left;
+    position: relative;
+    bottom: 1px;
+    left: 10px;
     margin-right: 15px;
+    max-height: 15px;
   }
 
   .alert-message-wrap {
     display: table;
-    min-height: 56px;
+    min-height: 25px;
     .alert-message {
       display: table-cell;
       vertical-align: middle;
@@ -707,6 +711,12 @@ pre {
 
 }
 
+.wrap-message {
+  span {
+    line-height: 1.9em;
+  }
+}
+
 .jobs-status {
   border-top: 1px solid darken($body-bg, 10%);
   border-bottom: 1px solid darken($body-bg, 10%);

http://git-wip-us.apache.org/repos/asf/ambari/blob/0472e5f1/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
index 2f114f9..abc66ec 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
@@ -19,9 +19,9 @@
 <div class={{alert-message-context-class flash.type "clearfix alert alert-dismissible alert-"}}>
   <button type="button" class="close" {{action "closeAlert"}}>&times;</button>
   <div class="alert-icon wrap-message">
-    {{#fa-stack size=2}}
-      {{fa-icon "circle-thin" stack=2}}
-      {{fa-icon (alert-message-icon-class flash.type) stack=1}}
+    {{#fa-stack size=1}}
+      {{fa-icon "circle-thin" stack=2 size=1}}
+      {{fa-icon (alert-message-icon-class flash.type) stack=1  size=1}}
     {{/fa-stack}}
   </div>
   <div class="alert-message-wrap wrap-message">


[32/41] ambari git commit: AMBARI-20671 Host checks: incorrect message for single host warnings. (ababiichuk)

Posted by ao...@apache.org.
AMBARI-20671 Host checks: incorrect message for single host warnings. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/01e0c695
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/01e0c695
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/01e0c695

Branch: refs/heads/branch-3.0-perf
Commit: 01e0c695812a4e1d8c3a3d44829fdbc9b853ec63
Parents: b38ba22
Author: ababiichuk <ab...@hortonworks.com>
Authored: Tue Apr 4 19:22:31 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../app/controllers/wizard/step3_controller.js  |  3 +-
 .../main/host/details/actions/check_host.js     | 86 +++++---------------
 .../wizard/step3/step3_host_warnings_popup.hbs  |  6 +-
 .../test/controllers/wizard/step3_test.js       | 34 --------
 4 files changed, 24 insertions(+), 105 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/01e0c695/ambari-web/app/controllers/wizard/step3_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step3_controller.js b/ambari-web/app/controllers/wizard/step3_controller.js
index 7a07253..c282451 100644
--- a/ambari-web/app/controllers/wizard/step3_controller.js
+++ b/ambari-web/app/controllers/wizard/step3_controller.js
@@ -787,8 +787,7 @@ App.WizardStep3Controller = Em.Controller.extend(App.ReloadPopupMixin, App.Check
           hosts: hostsJDKContext,
           hostsLong: hostsJDKContext,
           hostsNames: hostsJDKNames,
-          category: 'jdk',
-          onSingleHost: false
+          category: 'jdk'
         });
       }
       this.set('jdkCategoryWarnings', jdkWarnings);

http://git-wip-us.apache.org/repos/asf/ambari/blob/01e0c695/ambari-web/app/mixins/main/host/details/actions/check_host.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/host/details/actions/check_host.js b/ambari-web/app/mixins/main/host/details/actions/check_host.js
index fbee24c..c0ebf02 100644
--- a/ambari-web/app/mixins/main/host/details/actions/check_host.js
+++ b/ambari-web/app/mixins/main/host/details/actions/check_host.js
@@ -53,7 +53,6 @@ App.CheckHostMixin = Em.Mixin.create({
    *  hosts: string[],
    *  hostsLong: string[],
    *  hostsNames: string[],
-   *  onSingleHost: boolean
    * }} checkWarning
    */
 
@@ -309,8 +308,7 @@ App.CheckHostMixin = Em.Mixin.create({
         hosts: hostsContext,
         hostsLong: hostsContext,
         hostsNames: hostsRepoNames,
-        category: 'repositories',
-        onSingleHost: false
+        category: 'repositories'
       });
     }
     if (hostsDiskContext.length > 0) { // disk space warning exist
@@ -319,8 +317,7 @@ App.CheckHostMixin = Em.Mixin.create({
         hosts: hostsDiskContext,
         hostsLong: hostsDiskContext,
         hostsNames: hostsDiskNames,
-        category: 'disk',
-        onSingleHost: false
+        category: 'disk'
       });
     }
     if (thpContext.length > 0) { // THP warning existed
@@ -329,8 +326,7 @@ App.CheckHostMixin = Em.Mixin.create({
         hosts: thpContext,
         hostsLong: thpContext,
         hostsNames: thpHostsNames,
-        category: 'thp',
-        onSingleHost: false
+        category: 'thp'
       });
     }
 
@@ -373,8 +369,7 @@ App.CheckHostMixin = Em.Mixin.create({
               name: name,
               hosts: [contextMessage],
               hostsLong: [contextMessageLong],
-              hostsNames: [targetHostName],
-              onSingleHost: true
+              hostsNames: [targetHostName]
             };
             this.get("hostCheckWarnings").push(hostInfo);
           } else {
@@ -382,7 +377,6 @@ App.CheckHostMixin = Em.Mixin.create({
               hostInfo.hosts.push(contextMessage);
               hostInfo.hostsLong.push(contextMessageLong);
               hostInfo.hostsNames.push(targetHostName);
-              hostInfo.onSingleHost = false;
             }
           }
         }
@@ -549,14 +543,12 @@ App.CheckHostMixin = Em.Mixin.create({
         if (warning) {
           warning.hosts.push(hostName);
           warning.hostsLong.push(hostName);
-          warning.onSingleHost = false;
         } else {
           warningCategories.fileFoldersWarnings[path.name] = warning = {
             name: path.name,
             hosts: [hostName],
             hostsLong: [hostName],
-            category: 'fileFolders',
-            onSingleHost: true
+            category: 'fileFolders'
           };
         }
         host.warnings.push(warning);
@@ -572,15 +564,13 @@ App.CheckHostMixin = Em.Mixin.create({
             warning.hosts.push(hostName);
             warning.hostsLong.push(hostName);
             warning.version = _package.version;
-            warning.onSingleHost = false;
           } else {
             warningCategories.packagesWarnings[_package.name] = warning = {
               name: _package.name,
               version: _package.version,
               hosts: [hostName],
               hostsLong: [hostName],
-              category: 'packages',
-              onSingleHost: true
+              category: 'packages'
             };
           }
           host.warnings.push(warning);
@@ -606,7 +596,6 @@ App.CheckHostMixin = Em.Mixin.create({
           if (warning) {
             warning.hosts.push(hostName);
             warning.hostsLong.push(hostName);
-            warning.onSingleHost = false;
           } else {
             warningCategories.processesWarnings[process.pid] = warning = {
               name: (process.command.substr(0, 35) + '...'),
@@ -618,8 +607,7 @@ App.CheckHostMixin = Em.Mixin.create({
               command: '<table><tr><td style="word-break: break-all;">' +
               ((process.command.length < 500) ? process.command : process.command.substr(0, 230) + '...' +
               '<p style="text-align: center">................</p>' +
-              '...' + process.command.substr(-230)) + '</td></tr></table>',
-              onSingleHost: true
+              '...' + process.command.substr(-230)) + '</td></tr></table>'
             };
           }
           host.warnings.push(warning);
@@ -634,14 +622,12 @@ App.CheckHostMixin = Em.Mixin.create({
             if (warning) {
               warning.hosts.push(hostName);
               warning.hostsLong.push(hostName);
-              warning.onSingleHost = false;
             } else {
               warningCategories.servicesWarnings[service.name] = warning = {
                 name: service.name,
                 hosts: [hostName],
                 hostsLong: [hostName],
-                category: 'services',
-                onSingleHost: true
+                category: 'services'
               };
             }
             host.warnings.push(warning);
@@ -656,14 +642,12 @@ App.CheckHostMixin = Em.Mixin.create({
           if (warning) {
             warning.hosts.push(hostName);
             warning.hostsLong.push(hostName);
-            warning.onSingleHost = false;
           } else {
             warningCategories.usersWarnings[user.name] = warning = {
               name: user.name,
               hosts: [hostName],
               hostsLong: [hostName],
-              category: 'users',
-              onSingleHost: true
+              category: 'users'
             };
           }
           host.warnings.push(warning);
@@ -677,14 +661,12 @@ App.CheckHostMixin = Em.Mixin.create({
         if (warning) {
           warning.hosts.push(hostName);
           warning.hostsLong.push(hostName);
-          warning.onSingleHost = false;
         } else {
           warning = {
             name: umask,
             hosts: [hostName],
             hostsLong: [hostName],
-            category: 'misc',
-            onSingleHost: true
+            category: 'misc'
           };
           warnings.push(warning);
         }
@@ -698,14 +680,12 @@ App.CheckHostMixin = Em.Mixin.create({
         if (warning) {
           warning.hosts.push(hostName);
           warning.hostsLong.push(hostName);
-          warning.onSingleHost = false;
         } else {
           warning = {
             name: name,
             hosts: [hostName],
             hostsLong: [hostName],
-            category: 'firewall',
-            onSingleHost: true
+            category: 'firewall'
           };
           warnings.push(warning);
         }
@@ -718,15 +698,13 @@ App.CheckHostMixin = Em.Mixin.create({
           if (warning) {
             warning.hosts.push(hostName);
             warning.hostsLong.push(hostName);
-            warning.onSingleHost = false;
           } else {
             warningCategories.alternativeWarnings[alternative.name] = warning = {
               name: alternative.name,
               target: alternative.target,
               hosts: [hostName],
               hostsLong: [hostName],
-              category: 'alternatives',
-              onSingleHost: true
+              category: 'alternatives'
             };
           }
           host.warnings.push(warning);
@@ -739,14 +717,12 @@ App.CheckHostMixin = Em.Mixin.create({
         if (warning) {
           warning.hosts.push(hostName);
           warning.hostsLong.push(hostName);
-          warning.onSingleHost = false;
         } else {
           warning = {
             name: name,
             hosts: [hostName],
             hostsLong: [hostName],
-            category: 'reverseLookup',
-            onSingleHost: true
+            category: 'reverseLookup'
           };
           warnings.push(warning);
         }
@@ -810,14 +786,12 @@ App.CheckHostMixin = Em.Mixin.create({
         if (warning) {
           warning.hosts.push(_host.Hosts.host_name);
           warning.hostsLong.push(_host.Hosts.host_name);
-          warning.onSingleHost = false;
         } else {
           warningCategories.fileFoldersWarnings[path.name] = warning = {
             name: path.name,
             hosts: [_host.Hosts.host_name],
             hostsLong: [_host.Hosts.host_name],
-            category: 'fileFolders',
-            onSingleHost: true
+            category: 'fileFolders'
           };
         }
         host.warnings.push(warning);
@@ -833,15 +807,13 @@ App.CheckHostMixin = Em.Mixin.create({
             warning.hosts.push(_host.Hosts.host_name);
             warning.hostsLong.push(_host.Hosts.host_name);
             warning.version = _package.version;
-            warning.onSingleHost = false;
           } else {
             warningCategories.packagesWarnings[_package.name] = warning = {
               name: _package.name,
               version: _package.version,
               hosts: [_host.Hosts.host_name],
               hostsLong: [_host.Hosts.host_name],
-              category: 'packages',
-              onSingleHost: true
+              category: 'packages'
             };
           }
           host.warnings.push(warning);
@@ -858,7 +830,6 @@ App.CheckHostMixin = Em.Mixin.create({
           if (warning) {
             warning.hosts.push(_host.Hosts.host_name);
             warning.hostsLong.push(_host.Hosts.host_name);
-            warning.onSingleHost = false;
           } else {
             warningCategories.processesWarnings[process.pid] = warning = {
               name: (process.command.substr(0, 35) + '...'),
@@ -870,8 +841,7 @@ App.CheckHostMixin = Em.Mixin.create({
               command: '<table><tr><td style="word-break: break-all;">' +
               ((process.command.length < 500) ? process.command : process.command.substr(0, 230) + '...' +
               '<p style="text-align: center">................</p>' +
-              '...' + process.command.substr(-230)) + '</td></tr></table>',
-              onSingleHost: true
+              '...' + process.command.substr(-230)) + '</td></tr></table>'
             };
           }
           host.warnings.push(warning);
@@ -888,14 +858,12 @@ App.CheckHostMixin = Em.Mixin.create({
             if (warning) {
               warning.hosts.push(_host.Hosts.host_name);
               warning.hostsLong.push(_host.Hosts.host_name);
-              warning.onSingleHost = false;
             } else {
               warningCategories.servicesWarnings[service.name] = warning = {
                 name: service.name,
                 hosts: [_host.Hosts.host_name],
                 hostsLong: [_host.Hosts.host_name],
-                category: 'services',
-                onSingleHost: true
+                category: 'services'
               };
             }
             host.warnings.push(warning);
@@ -911,14 +879,12 @@ App.CheckHostMixin = Em.Mixin.create({
           if (warning) {
             warning.hosts.push(_host.Hosts.host_name);
             warning.hostsLong.push(_host.Hosts.host_name);
-            warning.onSingleHost = false;
           } else {
             warningCategories.usersWarnings[user.name] = warning = {
               name: user.name,
               hosts: [_host.Hosts.host_name],
               hostsLong: [_host.Hosts.host_name],
-              category: 'users',
-              onSingleHost: true
+              category: 'users'
             };
           }
           host.warnings.push(warning);
@@ -932,14 +898,12 @@ App.CheckHostMixin = Em.Mixin.create({
         if (warning) {
           warning.hosts.push(_host.Hosts.host_name);
           warning.hostsLong.push(_host.Hosts.host_name);
-          warning.onSingleHost = false;
         } else {
           warning = {
             name: umask,
             hosts: [_host.Hosts.host_name],
             hostsLong: [_host.Hosts.host_name],
-            category: 'misc',
-            onSingleHost: true
+            category: 'misc'
           };
           warnings.push(warning);
         }
@@ -953,14 +917,12 @@ App.CheckHostMixin = Em.Mixin.create({
         if (warning) {
           warning.hosts.push(_host.Hosts.host_name);
           warning.hostsLong.push(_host.Hosts.host_name);
-          warning.onSingleHost = false;
         } else {
           warning = {
             name: name,
             hosts: [_host.Hosts.host_name],
             hostsLong: [_host.Hosts.host_name],
-            category: 'firewall',
-            onSingleHost: true
+            category: 'firewall'
           };
           warnings.push(warning);
         }
@@ -973,15 +935,13 @@ App.CheckHostMixin = Em.Mixin.create({
           if (warning) {
             warning.hosts.push(_host.Hosts.host_name);
             warning.hostsLong.push(_host.Hosts.host_name);
-            warning.onSingleHost = false;
           } else {
             warningCategories.alternativeWarnings[alternative.name] = warning = {
               name: alternative.name,
               target: alternative.target,
               hosts: [_host.Hosts.host_name],
               hostsLong: [_host.Hosts.host_name],
-              category: 'alternatives',
-              onSingleHost: true
+              category: 'alternatives'
             };
           }
           host.warnings.push(warning);
@@ -994,14 +954,12 @@ App.CheckHostMixin = Em.Mixin.create({
         if (warning) {
           warning.hosts.push(_host.Hosts.host_name);
           warning.hostsLong.push(_host.Hosts.host_name);
-          warning.onSingleHost = false;
         } else {
           warning = {
             name: name,
             hosts: [_host.Hosts.host_name],
             hostsLong: [_host.Hosts.host_name],
-            category: 'reverseLookup',
-            onSingleHost: true
+            category: 'reverseLookup'
           };
           warnings.push(warning);
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/01e0c695/ambari-web/app/templates/wizard/step3/step3_host_warnings_popup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/wizard/step3/step3_host_warnings_popup.hbs b/ambari-web/app/templates/wizard/step3/step3_host_warnings_popup.hbs
index e8c5201..07ff31b 100644
--- a/ambari-web/app/templates/wizard/step3/step3_host_warnings_popup.hbs
+++ b/ambari-web/app/templates/wizard/step3/step3_host_warnings_popup.hbs
@@ -90,11 +90,7 @@
                         <td>{{category.action}}
                           <a href="javascript:void(null);" rel='HostsListTooltip' {{bindAttr data-original-title="warning.hostsList"}} {{action showHostsPopup warning.hostsLong}} {{QAAttr "host-with-warning-link"}}>
                             {{warning.hosts.length}}
-                            {{#if warning.onSingleHost}}
-                              {{t installer.step3.hostWarningsPopup.host}}
-                            {{else}}
-                              {{t installer.step3.hostWarningsPopup.hosts}}
-                            {{/if}}
+                            {{pluralize warning.hosts.length singular="t:installer.step3.hostWarningsPopup.host" plural="t:installer.step3.hostWarningsPopup.hosts"}}
                           </a>
                         </td>
                       </tr>

http://git-wip-us.apache.org/repos/asf/ambari/blob/01e0c695/ambari-web/test/controllers/wizard/step3_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/wizard/step3_test.js b/ambari-web/test/controllers/wizard/step3_test.js
index fa90065..b8bc794 100644
--- a/ambari-web/test/controllers/wizard/step3_test.js
+++ b/ambari-web/test/controllers/wizard/step3_test.js
@@ -1292,7 +1292,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'fileFolders'
                   }
                 ],
@@ -1314,7 +1313,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'fileFolders'
                   }
                 ],
@@ -1359,7 +1357,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'services'
                   }
                 ],
@@ -1384,7 +1381,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'services'
                   }
                 ],
@@ -1422,7 +1418,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'users'
                   }
                 ],
@@ -1446,7 +1441,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'users'
                   }
                 ],
@@ -1484,7 +1478,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'alternatives'
                   }
                 ],
@@ -1508,7 +1501,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'alternatives'
                   }
                 ],
@@ -1556,7 +1548,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     pid: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'processes'
                   }
                 ],
@@ -1580,7 +1571,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     pid: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'processes'
                   }
                 ],
@@ -1634,7 +1624,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1']);
       expect(warnings[0].hostsLong).to.eql(['c1']);
-      expect(warnings[0].onSingleHost).to.equal(true);
 
     });
 
@@ -1667,7 +1656,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1']);
       expect(warnings[0].hostsLong).to.eql(['c1']);
-      expect(warnings[0].onSingleHost).to.equal(true);
 
     });
 
@@ -1683,7 +1671,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1', 'c2']);
       expect(warnings[0].hostsLong).to.eql(['c1', 'c2']);
-      expect(warnings[0].onSingleHost).to.equal(false);
 
     });
 
@@ -1710,7 +1697,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1']);
       expect(warnings[0].hostsLong).to.eql(['c1']);
-      expect(warnings[0].onSingleHost).to.equal(true);
 
     });
 
@@ -1726,7 +1712,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1', 'c2']);
       expect(warnings[0].hostsLong).to.eql(['c1', 'c2']);
-      expect(warnings[0].onSingleHost).to.equal(false);
 
     });
   });
@@ -1784,7 +1769,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'fileFolders'
                   }
                 ],
@@ -1806,7 +1790,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'fileFolders'
                   }
                 ],
@@ -1850,7 +1833,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'services'
                   }
                 ],
@@ -1872,7 +1854,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'services'
                   }
                 ],
@@ -1906,7 +1887,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'users'
                   }
                 ],
@@ -1928,7 +1908,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'users'
                   }
                 ],
@@ -1962,7 +1941,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'alternatives'
                   }
                 ],
@@ -1984,7 +1962,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     name: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'alternatives'
                   }
                 ],
@@ -2028,7 +2005,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     pid: 'n1',
                     hosts: ['c1'],
-                    onSingleHost: true,
                     category: 'processes'
                   }
                 ],
@@ -2050,7 +2026,6 @@ describe('App.WizardStep3Controller', function () {
                   {
                     pid: 'n1',
                     hosts: ['c1', 'c2'],
-                    onSingleHost: false,
                     category: 'processes'
                   }
                 ],
@@ -2099,7 +2074,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1']);
       expect(warnings[0].hostsLong).to.eql(['c1']);
-      expect(warnings[0].onSingleHost).to.equal(true);
 
     });
 
@@ -2132,7 +2106,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1']);
       expect(warnings[0].hostsLong).to.eql(['c1']);
-      expect(warnings[0].onSingleHost).to.equal(true);
 
     });
 
@@ -2148,7 +2121,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1', 'c2']);
       expect(warnings[0].hostsLong).to.eql(['c1', 'c2']);
-      expect(warnings[0].onSingleHost).to.equal(false);
 
     });
 
@@ -2175,7 +2147,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1']);
       expect(warnings[0].hostsLong).to.eql(['c1']);
-      expect(warnings[0].onSingleHost).to.equal(true);
 
     });
 
@@ -2191,7 +2162,6 @@ describe('App.WizardStep3Controller', function () {
       expect(warnings.length).to.equal(1);
       expect(warnings[0].hosts).to.eql(['c1', 'c2']);
       expect(warnings[0].hostsLong).to.eql(['c1', 'c2']);
-      expect(warnings[0].onSingleHost).to.equal(false);
 
     });
 
@@ -3097,10 +3067,6 @@ describe('App.WizardStep3Controller', function () {
       expect(this.warnings.hostsNames.toArray()).to.be.eql(['h1', 'h4', 'h7']);
     });
 
-    it('warning appears on many hosts', function () {
-      expect(this.warnings.onSingleHost).to.be.false;
-    });
-
     it('validation context for hosts is valid', function () {
       var hosts = this.warnings.hosts;
       var expected = [


[07/41] ambari git commit: AMBARI-20652. HDP 3.0 TP - add Pig client: configs, metainfo, descriptors, service advisor and etc.(vbrodetskyi)

Posted by ao...@apache.org.
AMBARI-20652. HDP 3.0 TP - add Pig client: configs, metainfo, descriptors, service advisor and etc.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/27da4c8f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/27da4c8f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/27da4c8f

Branch: refs/heads/branch-3.0-perf
Commit: 27da4c8f2ac1d6b5ae93091bbdc04603bb8d4244
Parents: e0412e8
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Apr 3 15:37:51 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../PIG/0.16.1.3.0/configuration/pig-env.xml    |  41 +++++
 .../PIG/0.16.1.3.0/configuration/pig-log4j.xml  |  65 +++++++
 .../0.16.1.3.0/configuration/pig-properties.xml |  93 ++++++++++
 .../PIG/0.16.1.3.0/kerberos.json                |  17 ++
 .../common-services/PIG/0.16.1.3.0/metainfo.xml | 101 +++++++++++
 .../PIG/0.16.1.3.0/package/files/pigSmoke.sh    |  18 ++
 .../PIG/0.16.1.3.0/package/scripts/params.py    |  31 ++++
 .../0.16.1.3.0/package/scripts/params_linux.py  | 104 +++++++++++
 .../package/scripts/params_windows.py           |  45 +++++
 .../PIG/0.16.1.3.0/package/scripts/pig.py       |  80 +++++++++
 .../0.16.1.3.0/package/scripts/pig_client.py    |  71 ++++++++
 .../0.16.1.3.0/package/scripts/service_check.py | 142 +++++++++++++++
 .../PIG/0.16.1.3.0/service_advisor.py           | 171 +++++++++++++++++++
 .../stacks/HDP/3.0/services/PIG/metainfo.xml    |  27 +++
 14 files changed, 1006 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-env.xml b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-env.xml
new file mode 100644
index 0000000..11afa11
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-env.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_adding_forbidden="true">
+  <!-- pig-env.sh -->
+  <property>
+    <name>content</name>
+    <display-name>pig-env template</display-name>
+    <description>This is the jinja template for pig-env.sh file</description>
+    <value>
+JAVA_HOME={{java64_home}}
+HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+
+if [ -d "/usr/lib/tez" ]; then
+  PIG_OPTS="$PIG_OPTS -Dmapreduce.framework.name=yarn"
+fi
+    </value>
+    <value-attributes>
+      <type>content</type>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-log4j.xml b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-log4j.xml
new file mode 100644
index 0000000..e5ff9b6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-log4j.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>content</name>
+    <display-name>pig-log4j template</display-name>
+    <description>Custom log4j.properties</description>
+    <value>
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+
+# ***** Set root logger level to DEBUG and its only appender to A.
+log4j.logger.org.apache.pig=info, A
+
+# ***** A is set to be a ConsoleAppender.
+log4j.appender.A=org.apache.log4j.ConsoleAppender
+# ***** A uses PatternLayout.
+log4j.appender.A.layout=org.apache.log4j.PatternLayout
+log4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-properties.xml b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-properties.xml
new file mode 100644
index 0000000..fb43f6e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-properties.xml
@@ -0,0 +1,93 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>content</name>
+    <display-name>pig-properties template</display-name>
+    <description>Describe all the Pig agent configurations</description>
+    <value>
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Pig default configuration file. All values can be overwritten by pig.properties and command line arguments.
+# see bin/pig -help
+
+# brief logging (no timestamps)
+brief=false
+
+# debug level, INFO is default
+debug=INFO
+
+# verbose print all log messages to screen (default to print only INFO and above to screen)
+verbose=false
+
+# exectype local|mapreduce|tez, mapreduce is default
+exectype=tez
+
+# Enable insertion of information about script into hadoop job conf
+pig.script.info.enabled=true
+
+# Do not spill temp files smaller than this size (bytes)
+pig.spill.size.threshold=5000000
+
+# EXPERIMENT: Activate garbage collection when spilling a file bigger than this size (bytes)
+# This should help reduce the number of files being spilled.
+pig.spill.gc.activation.size=40000000
+
+# the following two parameters are to help estimate the reducer number
+pig.exec.reducers.bytes.per.reducer=1000000000
+pig.exec.reducers.max=999
+
+# Temporary location to store the intermediate data.
+pig.temp.dir=/tmp/
+
+# Threshold for merging FRJoin fragment files
+pig.files.concatenation.threshold=100
+pig.optimistic.files.concatenation=false;
+
+pig.disable.counter=false
+
+hcat.bin=/usr/bin/hcat
+
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <empty-value-valid>true</empty-value-valid>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/kerberos.json b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/kerberos.json
new file mode 100644
index 0000000..22dd6cb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/kerberos.json
@@ -0,0 +1,17 @@
+{
+  "services": [
+    {
+      "name": "PIG",
+      "components": [
+        {
+          "name": "PIG",
+          "identities": [
+            {
+              "name": "/HDFS/NAMENODE/hdfs"
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/metainfo.xml b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/metainfo.xml
new file mode 100644
index 0000000..041255a
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/metainfo.xml
@@ -0,0 +1,101 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>PIG</name>
+      <displayName>Pig</displayName>
+      <comment>Scripting platform for analyzing large datasets</comment>
+      <version>0.16.1.3.0</version>
+      <components>
+        <component>
+          <name>PIG</name>
+          <displayName>Pig Client</displayName>
+          <category>CLIENT</category>
+          <cardinality>0+</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <commandScript>
+            <script>scripts/pig_client.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>1200</timeout>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>env</type>
+              <fileName>pig-env.sh</fileName>
+              <dictionaryName>pig-env</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>log4j.properties</fileName>
+              <dictionaryName>pig-log4j</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>pig.properties</fileName>
+              <dictionaryName>pig-properties</dictionaryName>
+            </configFile>                         
+          </configFiles>          
+        </component>
+      </components>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>
+          <packages>
+            <package>
+              <name>pig_${stack_version}</name>
+            </package>
+            <package>
+              <name>datafu_${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>debian7,ubuntu12,ubuntu14,ubuntu16</osFamily>
+          <packages>
+            <package>
+              <name>pig-${stack_version}</name>
+            </package>
+            <package>
+              <name>datafu-${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <requiredServices>
+        <service>YARN</service>
+        <service>TEZ</service>
+      </requiredServices>
+
+      <configuration-dependencies>
+        <config-type>pig-env</config-type>
+        <config-type>pig-log4j</config-type>
+        <config-type>pig-properties</config-type>
+      </configuration-dependencies>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/files/pigSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/files/pigSmoke.sh b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/files/pigSmoke.sh
new file mode 100644
index 0000000..a22456e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/files/pigSmoke.sh
@@ -0,0 +1,18 @@
+/*Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License */
+
+A = load 'passwd' using PigStorage(':');
+B = foreach A generate \$0 as id;
+store B into 'pigsmoke.out';

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params.py
new file mode 100644
index 0000000..3aebda0
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params.py
@@ -0,0 +1,31 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
+
+if OSCheck.is_windows_family():
+  from params_windows import *
+else:
+  from params_linux import *
+
+sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params_linux.py
new file mode 100644
index 0000000..5af1274
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params_linux.py
@@ -0,0 +1,104 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
+
+# server configurations
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+stack_name = default("/hostLevelParams/stack_name", None)
+stack_root = Script.get_stack_root()
+
+stack_version_unformatted = config['hostLevelParams']['stack_version']
+stack_version_formatted = format_stack_version(stack_version_unformatted)
+
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+
+# hadoop default parameters
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+pig_conf_dir = "/etc/pig/conf"
+hadoop_home = '/usr'
+pig_bin_dir = ""
+
+# hadoop parameters for stack versions supporting rolling_upgrade
+if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
+  pig_conf_dir = format("{stack_root}/current/pig-client/conf")
+  hadoop_home = stack_select.get_hadoop_dir("home")
+  pig_bin_dir = format("{stack_root}/current/pig-client/bin")
+
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+smoke_hdfs_user_mode = 0770
+user_group = config['configurations']['cluster-env']['user_group']
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+pig_env_sh_template = config['configurations']['pig-env']['content']
+
+# not supporting 32 bit jdk.
+java64_home = config['hostLevelParams']['java_home']
+
+pig_properties = config['configurations']['pig-properties']['content']
+
+log4j_props = config['configurations']['pig-log4j']['content']
+
+
+
+hdfs_site = config['configurations']['hdfs-site']
+default_fs = config['configurations']['core-site']['fs.defaultFS']
+
+dfs_type = default("/commandParams/dfs_type", "")
+
+import functools
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir,
+  principal_name = hdfs_principal_name,
+  hdfs_site = hdfs_site,
+  default_fs = default_fs,
+  immutable_paths = get_not_managed_resources(),
+  dfs_type = dfs_type
+ )
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params_windows.py
new file mode 100644
index 0000000..bd9654f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/params_windows.py
@@ -0,0 +1,45 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management.libraries.script.script import Script
+
+# server configurations
+config = Script.get_config()
+
+stack_root = None
+pig_home = None
+pig_conf_dir = None
+try:
+  stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+  pig_home = os.environ['PIG_HOME']
+  pig_conf_dir = os.path.join(pig_home,'conf')
+except:
+  pass
+
+pig_properties = config['configurations']['pig-properties']['content']
+
+if (('pig-log4j' in config['configurations']) and ('content' in config['configurations']['pig-log4j'])):
+  log4j_props = config['configurations']['pig-log4j']['content']
+else:
+  log4j_props = None
+
+hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
+pig_user = hadoop_user
+hdfs_user = hadoop_user

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/pig.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/pig.py b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/pig.py
new file mode 100644
index 0000000..b6825d5
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/pig.py
@@ -0,0 +1,80 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import os
+from resource_management.core.resources.system import Directory, File
+from resource_management.core.source import InlineTemplate
+from resource_management.libraries.functions.format import format
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
+def pig():
+  import params
+
+  Directory( params.pig_conf_dir,
+    create_parents = True,
+    owner = params.hdfs_user,
+    group = params.user_group
+  )
+
+  File(format("{pig_conf_dir}/pig-env.sh"),
+    owner=params.hdfs_user,
+    mode=0755,
+    content=InlineTemplate(params.pig_env_sh_template)
+  )
+
+  # pig_properties is always set to a default even if it's not in the payload
+  File(format("{params.pig_conf_dir}/pig.properties"),
+              mode=0644,
+              group=params.user_group,
+              owner=params.hdfs_user,
+              content=params.pig_properties
+  )
+
+  if (params.log4j_props != None):
+    File(format("{params.pig_conf_dir}/log4j.properties"),
+      mode=0644,
+      group=params.user_group,
+      owner=params.hdfs_user,
+      content=params.log4j_props
+    )
+  elif (os.path.exists(format("{params.pig_conf_dir}/log4j.properties"))):
+    File(format("{params.pig_conf_dir}/log4j.properties"),
+      mode=0644,
+      group=params.user_group,
+      owner=params.hdfs_user
+    )
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def pig():
+  import params
+  File(os.path.join(params.pig_conf_dir, "pig.properties"),
+       mode="f",
+       owner=params.pig_user,
+       content=params.pig_properties
+  )
+
+  if (params.log4j_props != None):
+    File(os.path.join(params.pig_conf_dir, "log4j.properties"),
+         mode='f',
+         owner=params.pig_user,
+         content=params.log4j_props
+    )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/pig_client.py b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/pig_client.py
new file mode 100644
index 0000000..6fdcef8
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/pig_client.py
@@ -0,0 +1,71 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import sys
+import os
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions.constants import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from pig import pig
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from resource_management.libraries.functions.default import default
+from resource_management.core.exceptions import ClientComponentHasNoStatus
+
+class PigClient(Script):
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    pig()
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class PigClientLinux(PigClient):
+  def get_component_name(self):
+    return "hadoop-client"
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
+      conf_select.select(params.stack_name, "pig", params.version)
+      conf_select.select(params.stack_name, "hadoop", params.version)
+      stack_select.select("hadoop-client", params.version) # includes pig-client
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class PigClientWindows(PigClient):
+
+  def install(self, env):
+    import params
+    if params.pig_home is None:
+      self.install_packages(env)
+    self.configure(env)
+
+if __name__ == "__main__":
+  PigClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/service_check.py
new file mode 100644
index 0000000..ba4082e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/service_check.py
@@ -0,0 +1,142 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import os
+
+from resource_management.core.resources.system import Execute, File
+from resource_management.core.source import InlineTemplate, StaticFile
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.script.script import Script
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+
+class PigServiceCheck(Script):
+  pass
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class PigServiceCheckLinux(PigServiceCheck):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+
+    input_file = format('/user/{smokeuser}/passwd')
+    output_dir = format('/user/{smokeuser}/pigsmoke.out')
+
+    params.HdfsResource(format("/user/{smokeuser}"),
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.smokeuser,
+                        mode=params.smoke_hdfs_user_mode,
+                        )
+
+    params.HdfsResource(output_dir,
+                        type="directory",
+                        action="delete_on_execute",
+                        owner=params.smokeuser,
+                        )
+    params.HdfsResource(input_file,
+                        type="file",
+                        source="/etc/passwd",
+                        action="create_on_execute",
+                        owner=params.smokeuser,
+    )
+    params.HdfsResource(None, action="execute")
+ 
+    if params.security_enabled:
+      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
+      Execute(kinit_cmd,
+        user=params.smokeuser
+      )
+
+    File( format("{tmp_dir}/pigSmoke.sh"),
+      content = StaticFile("pigSmoke.sh"),
+      mode = 0755
+    )
+
+    # check for Pig-on-M/R
+    Execute( format("pig {tmp_dir}/pigSmoke.sh"),
+      tries     = 3,
+      try_sleep = 5,
+      path      = format('{pig_bin_dir}:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
+      user      = params.smokeuser,
+      logoutput = True
+    )
+
+    test_cmd = format("fs -test -e {output_dir}")
+    ExecuteHadoop( test_cmd,
+      user      = params.smokeuser,
+      conf_dir = params.hadoop_conf_dir,
+      bin_dir = params.hadoop_bin_dir
+    )
+
+    if params.stack_version_formatted and check_stack_feature(StackFeature.PIG_ON_TEZ, params.stack_version_formatted):
+      # cleanup results from previous test
+      params.HdfsResource(output_dir,
+                          type="directory",
+                          action="delete_on_execute",
+                          owner=params.smokeuser,
+      )
+      params.HdfsResource(input_file,
+                          type="file",
+                          source="/etc/passwd",
+                          action="create_on_execute",
+                          owner=params.smokeuser,
+      )
+
+      # Check for Pig-on-Tez
+      resource_created = copy_to_hdfs(
+        "tez", params.user_group,
+        params.hdfs_user,
+        skip=params.sysprep_skip_copy_tarballs_hdfs)
+      if resource_created:
+        params.HdfsResource(None, action="execute")
+
+      Execute(format("pig -x tez {tmp_dir}/pigSmoke.sh"),
+        tries     = 3,
+        try_sleep = 5,
+        path      = format('{pig_bin_dir}:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
+        user      = params.smokeuser,
+        logoutput = True
+      )
+
+      ExecuteHadoop(test_cmd,
+        user      = params.smokeuser,
+        conf_dir = params.hadoop_conf_dir,
+        bin_dir = params.hadoop_bin_dir
+      )
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class PigServiceCheckWindows(PigServiceCheck):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+    smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
+    service = "PIG"
+    Execute(format("cmd /C {smoke_cmd} {service}", smoke_cmd=smoke_cmd, service=service), logoutput=True, user=params.pig_user, timeout=300)
+
+if __name__ == "__main__":
+  PigServiceCheck().execute()
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/service_advisor.py b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/service_advisor.py
new file mode 100644
index 0000000..8f89c5d
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/service_advisor.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env ambari-python-wrap
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+# Python imports
+import imp
+import os
+import traceback
+import re
+import socket
+import fnmatch
+
+
+from resource_management.core.logger import Logger
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+STACKS_DIR = os.path.join(SCRIPT_DIR, '../../../stacks/')
+PARENT_FILE = os.path.join(STACKS_DIR, 'service_advisor.py')
+
+try:
+  with open(PARENT_FILE, 'rb') as fp:
+    service_advisor = imp.load_module('service_advisor', fp, PARENT_FILE, ('.py', 'rb', imp.PY_SOURCE))
+except Exception as e:
+  traceback.print_exc()
+  print "Failed to load parent"
+
+class PigServiceAdvisor(service_advisor.ServiceAdvisor):
+
+  def __init__(self, *args, **kwargs):
+    self.as_super = super(PigServiceAdvisor, self)
+    self.as_super.__init__(*args, **kwargs)
+
+    # Always call these methods
+    self.modifyMastersWithMultipleInstances()
+    self.modifyCardinalitiesDict()
+    self.modifyHeapSizeProperties()
+    self.modifyNotValuableComponents()
+    self.modifyComponentsNotPreferableOnServer()
+    self.modifyComponentLayoutSchemes()
+
+  def modifyMastersWithMultipleInstances(self):
+    """
+    Modify the set of masters with multiple instances.
+    Must be overriden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def modifyCardinalitiesDict(self):
+    """
+    Modify the dictionary of cardinalities.
+    Must be overriden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def modifyHeapSizeProperties(self):
+    """
+    Modify the dictionary of heap size properties.
+    Must be overriden in child class.
+    """
+    pass
+
+  def modifyNotValuableComponents(self):
+    """
+    Modify the set of components whose host assignment is based on other services.
+    Must be overriden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def modifyComponentsNotPreferableOnServer(self):
+    """
+    Modify the set of components that are not preferable on the server.
+    Must be overriden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def modifyComponentLayoutSchemes(self):
+    """
+    Modify layout scheme dictionaries for components.
+    The scheme dictionary basically maps the number of hosts to
+    host index where component should exist.
+    Must be overriden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def getServiceComponentLayoutValidations(self, services, hosts):
+    """
+    Get a list of errors.
+    Must be overriden in child class.
+    """
+
+    return []
+
+  def getServiceConfigurationRecommendations(self, configurations, clusterData, services, hosts):
+    """
+    Entry point.
+    Must be overriden in child class.
+    """
+    #Logger.info("Class: %s, Method: %s. Recommending Service Configurations." %
+    #            (self.__class__.__name__, inspect.stack()[0][3]))
+
+    recommender = PigRecommender()
+
+
+  def getServiceConfigurationsValidationItems(self, configurations, recommendedDefaults, services, hosts):
+    """
+    Entry point.
+    Validate configurations for the service. Return a list of errors.
+    The code for this function should be the same for each Service Advisor.
+    """
+    #Logger.info("Class: %s, Method: %s. Validating Configurations." %
+    #            (self.__class__.__name__, inspect.stack()[0][3]))
+
+    validator = PigValidator()
+    # Calls the methods of the validator using arguments,
+    # method(siteProperties, siteRecommendations, configurations, services, hosts)
+    return validator.validateListOfConfigUsingMethod(configurations, recommendedDefaults, services, hosts, validator.validators)
+
+
+
+class PigRecommender(service_advisor.ServiceAdvisor):
+  """
+  Pig Recommender suggests properties when adding the service for the first time or modifying configs via the UI.
+  """
+
+  def __init__(self, *args, **kwargs):
+    self.as_super = super(PigRecommender, self)
+    self.as_super.__init__(*args, **kwargs)
+
+
+
+
+
+
+
+class PigValidator(service_advisor.ServiceAdvisor):
+  """
+  Pig Validator checks the correctness of properties whenever the service is first added or the user attempts to
+  change configs via the UI.
+  """
+
+  def __init__(self, *args, **kwargs):
+    self.as_super = super(PigValidator, self)
+    self.as_super.__init__(*args, **kwargs)
+
+    self.validators = []
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/27da4c8f/ambari-server/src/main/resources/stacks/HDP/3.0/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/PIG/metainfo.xml
new file mode 100644
index 0000000..fd664d0
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/PIG/metainfo.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>PIG</name>
+      <version>0.16.1.3.0</version>
+      <extends>common-services/PIG/0.16.1.3.0</extends>
+    </service>
+  </services>
+</metainfo>


[19/41] ambari git commit: AMBARI-20655.Need to improve the grouping of the nodes available in transition section of WFM(M Madhan Mohan Reddy via padmapriyanitt)

Posted by ao...@apache.org.
AMBARI-20655.Need to improve the grouping of the nodes available in transition section of WFM(M Madhan Mohan Reddy via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/632ba71f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/632ba71f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/632ba71f

Branch: refs/heads/branch-3.0-perf
Commit: 632ba71fd66967a538bb9469707f94762f7602ae
Parents: 6e5c12f
Author: padmapriyanitt <pa...@gmail.com>
Authored: Tue Apr 4 11:47:45 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../templates/components/transition-config.hbs  | 34 +++++++++++---------
 1 file changed, 19 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/632ba71f/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/transition-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/transition-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/transition-config.hbs
index 83fe96c..7618481 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/transition-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/transition-config.hbs
@@ -19,14 +19,16 @@
   <label class="control-label col-xs-2">Error To<span class="requiredField">&nbsp;*</span></label>
   <div class=" col-xs-7">
     <select onchange={{action "errorToHandler" value="target.value"}} name="select-node" class="form-control" data-show-icon="true">
-      <optgroup label="Kill Nodes"></optgroup>
-      {{#each killNodes as |node index|}}
-      <option value={{node.name}} selected={{eq node.name transition.errorNode.name}}>{{node.name}}</option>
-      {{/each}}
-      <optgroup label="Other Nodes"></optgroup>
-      {{#each descendantNodes as |node index|}}
-      <option value={{node.name}} selected={{eq node.name transition.errorNode.name}}>{{node.name}}</option>
-      {{/each}}
+      <optgroup label="Kill Nodes">
+        {{#each killNodes as |node index|}}
+        <option value={{node.name}} selected={{eq node.name transition.errorNode.name}}>{{node.name}}</option>
+        {{/each}}
+      </optgroup>
+      <optgroup label="Other Nodes">
+        {{#each descendantNodes as |node index|}}
+        <option value={{node.name}} selected={{eq node.name transition.errorNode.name}}>{{node.name}}</option>
+        {{/each}}
+      </optgroup>
     </select>
     {{#if (eq transition.errorNode.type 'kill')}}
       <span class="kill-message">{{transition.errorNode.killMessage}}</span>
@@ -37,14 +39,16 @@
   <label class="control-label col-xs-2">Ok To<span class="requiredField">&nbsp;*</span></label>
   <div class=" col-xs-7">
     <select onchange={{action "okToHandler" value="target.value"}} name="select-node" class="form-control" data-show-icon="true">
-      <optgroup label="Kill Nodes"></optgroup>
-      {{#each killNodes as |node index|}}
-      <option value={{node.name}} selected={{eq node.name transition.okToNode.name}}>{{node.name}}</option>
-      {{/each}}
-      <optgroup label="Other Nodes"></optgroup>
-      {{#each currentNode.validOkToNodes as |node index|}}
+      <optgroup label="Kill Nodes">
+        {{#each killNodes as |node index|}}
         <option value={{node.name}} selected={{eq node.name transition.okToNode.name}}>{{node.name}}</option>
-      {{/each}}
+        {{/each}}
+      </optgroup>
+      <optgroup label="Other Nodes">
+        {{#each currentNode.validOkToNodes as |node index|}}
+          <option value={{node.name}} selected={{eq node.name transition.okToNode.name}}>{{node.name}}</option>
+        {{/each}}
+      </optgroup>
     </select>
     {{#if showWarning}}
       <span class="ok-to-warning">


[30/41] ambari git commit: AMBARI-20681 Select Version step of installer: repo URL validation message issues. (ababiichuk)

Posted by ao...@apache.org.
AMBARI-20681 Select Version step of installer: repo URL validation message issues. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5f99b1a5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5f99b1a5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5f99b1a5

Branch: refs/heads/branch-3.0-perf
Commit: 5f99b1a53d9b5862c26f6ef92e7af1141df59249
Parents: c3361d9
Author: ababiichuk <ab...@hortonworks.com>
Authored: Wed Apr 5 15:05:29 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/views/wizard/step1_view.js | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5f99b1a5/ambari-web/app/views/wizard/step1_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/wizard/step1_view.js b/ambari-web/app/views/wizard/step1_view.js
index 0161985..584f621 100644
--- a/ambari-web/app/views/wizard/step1_view.js
+++ b/ambari-web/app/views/wizard/step1_view.js
@@ -213,9 +213,12 @@ App.WizardStep1View = Em.View.extend({
   popoverView: Em.View.extend({
     tagName: 'i',
     classNameBindings: ['repository.validationClassName'],
-    attributeBindings: ['repository.errorTitle:title', 'repository.errorContent:data-content'],
+    attributeBindings: ['repository.errorTitle:data-original-title', 'repository.errorContent:data-content'],
     didInsertElement: function () {
-      App.popover($(this.get('element')), {'trigger': 'hover'});
+      App.popover($(this.get('element')), {
+        template: '<div class="popover"><div class="arrow"></div><div class="popover-inner"><h3 class="popover-title"></h3><div class="popover-content"></div></div></div>',
+        trigger: 'hover'
+      });
     }
   }),
 


[29/41] ambari git commit: AMBARI-20652. Test fix.HDP 3.0 TP - add Pig client: configs, metainfo, descriptors, service advisor and etc.(vbrodetskyi)

Posted by ao...@apache.org.
AMBARI-20652. Test fix.HDP 3.0 TP - add Pig client: configs, metainfo, descriptors, service advisor and etc.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/88b82fe1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/88b82fe1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/88b82fe1

Branch: refs/heads/branch-3.0-perf
Commit: 88b82fe127539091aca0c28ebad2b8e00c30f1e5
Parents: 314e41e
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Apr 3 19:28:26 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../common-services/PIG/0.16.1.3.0/configuration/pig-env.xml       | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/88b82fe1/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-env.xml b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-env.xml
index 11afa11..5eafd9e 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-env.xml
+++ b/ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/configuration/pig-env.xml
@@ -36,6 +36,6 @@ fi
     <value-attributes>
       <type>content</type>
     </value-attributes>
-    <on-ambari-upgrade add="fallse"/>
+    <on-ambari-upgrade add="false"/>
   </property>
 </configuration>


[31/41] ambari git commit: AMBARI-20592 ServicePresences check should also check for services dropped from a release (dili)

Posted by ao...@apache.org.
AMBARI-20592 ServicePresences check should also check for services dropped from a release (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/45d8baf1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/45d8baf1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/45d8baf1

Branch: refs/heads/branch-3.0-perf
Commit: 45d8baf1ab64c42bba012fef95a74e64063e78f6
Parents: 01e0c69
Author: Di Li <di...@apache.org>
Authored: Tue Apr 4 13:58:58 2017 -0400
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../ambari/server/checks/CheckDescription.java  |  5 +-
 .../server/checks/ServicePresenceCheck.java     | 52 +++++++++++++++++---
 .../server/checks/ServicePresenceCheckTest.java | 41 ++++++++++++---
 3 files changed, 83 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/45d8baf1/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
index 71c5857..e17185f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
@@ -280,9 +280,12 @@ public class CheckDescription {
             "This service does not support upgrades and must be removed before the upgrade can continue. " +
             "After upgrading, %s can be reinstalled")
         .put(ServicePresenceCheck.KEY_SERVICE_REMOVED,
+             "The %s service is currently installed on the cluster. " +
+             "This service is removed from the new release and must be removed before the upgrade can continue.")
+        .put(ServicePresenceCheck.KEY_SERVICE_REPLACED,
             "The %s service is currently installed on the cluster. " +
             "This service is removed from the new release and must be removed before the upgrade can continue. " +
-            "After upgrading, %s can be installed").build());
+            "After upgrading, %s can be installed as the replacement.").build());
 
   public static CheckDescription RANGER_SERVICE_AUDIT_DB_CHECK = new CheckDescription("RANGER_SERVICE_AUDIT_DB_CHECK",
     PrereqCheckType.SERVICE,

http://git-wip-us.apache.org/repos/asf/ambari/blob/45d8baf1/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
index ed38882..5ff1f0b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
@@ -46,7 +46,9 @@ public class ServicePresenceCheck extends AbstractCheckDescriptor{
 
   private static final Logger LOG = LoggerFactory.getLogger(ServicePresenceCheck.class);
 
+  static final String KEY_SERVICE_REPLACED = "service_replaced";
   static final String KEY_SERVICE_REMOVED = "service_removed";
+
   /*
    * List of services that do not support upgrade
    * services must be removed before the stack upgrade
@@ -59,6 +61,11 @@ public class ServicePresenceCheck extends AbstractCheckDescriptor{
   static final String REMOVED_SERVICES_PROPERTY_NAME = "removed-service-names";
 
   /*
+   * List of services replaced by other services in the new release
+   * */
+  static final String REPLACED_SERVICES_PROPERTY_NAME = "replaced-service-names";
+
+  /*
    * Such as Spark to Spark2
    */
   static final String NEW_SERVICES_PROPERTY_NAME = "new-service-names";
@@ -73,7 +80,9 @@ public class ServicePresenceCheck extends AbstractCheckDescriptor{
     Set<String> installedServices = cluster.getServices().keySet();
 
     List<String> noUpgradeSupportServices = getNoUpgradeSupportServices(request);
-    Map<String, String> removedServices = getRemovedServices(request);
+    Map<String, String> replacedServices = getReplacedServices(request);
+    List<String> removedServices = getRemovedServices(request);
+
     List<String> failReasons = new ArrayList<>();
 
     String reason = getFailReason(prerequisiteCheck, request);
@@ -85,8 +94,8 @@ public class ServicePresenceCheck extends AbstractCheckDescriptor{
       }
     }
 
-    reason = getFailReason(KEY_SERVICE_REMOVED, prerequisiteCheck, request);
-    for (Map.Entry<String, String> entry : removedServices.entrySet()) {
+    reason = getFailReason(KEY_SERVICE_REPLACED, prerequisiteCheck, request);
+    for (Map.Entry<String, String> entry : replacedServices.entrySet()) {
       String removedService = entry.getKey();
       if(installedServices.contains(removedService.toUpperCase())){
         prerequisiteCheck.getFailedOn().add(removedService);
@@ -96,6 +105,15 @@ public class ServicePresenceCheck extends AbstractCheckDescriptor{
       }
     }
 
+    reason = getFailReason(KEY_SERVICE_REMOVED, prerequisiteCheck, request);
+    for(String service: removedServices){
+      if (installedServices.contains(service.toUpperCase())){
+        prerequisiteCheck.getFailedOn().add(service);
+        String msg = String.format(reason, service);
+        failReasons.add(msg);
+      }
+    }
+
     if(!failReasons.isEmpty()){
       prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
       prerequisiteCheck.setFailReason(StringUtils.join(failReasons, '\n'));
@@ -138,28 +156,46 @@ public class ServicePresenceCheck extends AbstractCheckDescriptor{
   }
 
   /**
+  +   * @return service names
+  +   * */
+  private List<String> getRemovedServices(PrereqCheckRequest request){
+    List<String> result = new ArrayList<String>();
+    String value = getPropertyValue(request, REMOVED_SERVICES_PROPERTY_NAME);
+    if (null != value){
+      String[] services = value.split(",");
+      for(String service: services){
+        service = service.trim();
+        if (!service.isEmpty()){
+          result.add(service);
+        }
+      }
+    }
+    return result;
+  }
+
+  /**
    * @return service names and new service names map
    * */
-  private Map<String, String> getRemovedServices(PrereqCheckRequest request) throws AmbariException{
+  private Map<String, String> getReplacedServices(PrereqCheckRequest request) throws AmbariException{
     Map<String, String> result = new LinkedHashMap<>();
-    String value = getPropertyValue(request, REMOVED_SERVICES_PROPERTY_NAME);
+    String value = getPropertyValue(request, REPLACED_SERVICES_PROPERTY_NAME);
     String newValue = getPropertyValue(request, NEW_SERVICES_PROPERTY_NAME);
     if(value == null && newValue == null){
       return result; //no need to check removed services as they are not specified in the upgrade xml file.
     } else {
       if (value == null || newValue == null){
-        throw new AmbariException(String.format("Both %s and %s list must be specified in the upgrade XML file.", REMOVED_SERVICES_PROPERTY_NAME, NEW_SERVICES_PROPERTY_NAME));
+        throw new AmbariException(String.format("Both %s and %s list must be specified in the upgrade XML file.", REPLACED_SERVICES_PROPERTY_NAME, NEW_SERVICES_PROPERTY_NAME));
       } else {
         List<String> oldServices = Arrays.asList(value.split(","));
         List<String> newServices = Arrays.asList(newValue.split(","));
         if (oldServices.size() != newServices.size()){
-          throw new AmbariException(String.format("%s must have the same number of services as the %s list.", NEW_SERVICES_PROPERTY_NAME, REMOVED_SERVICES_PROPERTY_NAME));
+          throw new AmbariException(String.format("%s must have the same number of services as the %s list.", NEW_SERVICES_PROPERTY_NAME, REPLACED_SERVICES_PROPERTY_NAME));
         } else {
           for (int i = 0; i < oldServices.size(); i++){
             String oldService = oldServices.get(i).trim();
             String newService = newServices.get(i).trim();
             if (oldService.isEmpty() || newService.isEmpty()) {
-              throw new AmbariException(String.format("Make sure both %s and %s list only contain comma separated list of services.", NEW_SERVICES_PROPERTY_NAME, REMOVED_SERVICES_PROPERTY_NAME));
+              throw new AmbariException(String.format("Make sure both %s and %s list only contain comma separated list of services.", NEW_SERVICES_PROPERTY_NAME, REPLACED_SERVICES_PROPERTY_NAME));
             } else {
               result.put(oldService, newService);
             }

http://git-wip-us.apache.org/repos/asf/ambari/blob/45d8baf1/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicePresenceCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicePresenceCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicePresenceCheckTest.java
index d89f0e5..ed2fcb9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicePresenceCheckTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicePresenceCheckTest.java
@@ -64,7 +64,8 @@ public class ServicePresenceCheckTest {
 
     Map<String, String> checkProperties = new HashMap<>();
     checkProperties.put(ServicePresenceCheck.NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME,"MyServiceOne, MyServiceTwo");
-    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"OldServiceOne, OldServiceTwo");
+    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"RemovedServiceOne, RemovedServiceTwo");
+    checkProperties.put(ServicePresenceCheck.REPLACED_SERVICES_PROPERTY_NAME,"OldServiceOne, OldServiceTwo");
     checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"NewServiceOne, NewServiceTwo");
 
     PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
@@ -105,7 +106,7 @@ public class ServicePresenceCheckTest {
   }
 
   @Test
-  public void testPerformHasRemovedServices() throws Exception {
+  public void testPerformHasReplacedServices() throws Exception {
     final Cluster cluster = Mockito.mock(Cluster.class);
     Mockito.when(cluster.getClusterId()).thenReturn(1L);
     Mockito.when(m_clusters.getCluster("cluster")).thenReturn(cluster);
@@ -116,7 +117,7 @@ public class ServicePresenceCheckTest {
     Mockito.when(cluster.getServices()).thenReturn(services);
 
     Map<String, String> checkProperties = new HashMap<>();
-    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"Atlas, OldService");
+    checkProperties.put(ServicePresenceCheck.REPLACED_SERVICES_PROPERTY_NAME,"Atlas, OldService");
     checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"Atlas2, NewService");
 
     PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
@@ -132,6 +133,32 @@ public class ServicePresenceCheckTest {
   }
 
   @Test
+  public void testPerformHasRemovedServices() throws Exception {
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(m_clusters.getCluster("cluster")).thenReturn(cluster);
+
+    Map<String, Service> services = new HashMap<String, Service>();
+    services.put("ATLAS", Mockito.mock(Service.class));
+    services.put("OLDSERVICE", Mockito.mock(Service.class));
+    Mockito.when(cluster.getServices()).thenReturn(services);
+
+    Map<String, String> checkProperties = new HashMap<String, String>();
+    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"OldService");
+
+    PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
+    Mockito.when(prerequisiteCheckConfig.getCheckProperties(
+        m_check.getClass().getName())).thenReturn(checkProperties);
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setPrerequisiteCheckConfig(prerequisiteCheckConfig);
+
+    m_check.perform(check, request);
+    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
+  }
+
+  @Test
   public void testPerformMixOne() throws Exception {
     final Cluster cluster = Mockito.mock(Cluster.class);
     Mockito.when(cluster.getClusterId()).thenReturn(1L);
@@ -139,12 +166,14 @@ public class ServicePresenceCheckTest {
 
     Map<String, Service> services = new HashMap<>();
     services.put("ATLAS", Mockito.mock(Service.class));
+    services.put("REMOVEDSERVICE", Mockito.mock(Service.class));
     Mockito.when(cluster.getServices()).thenReturn(services);
 
     Map<String, String> checkProperties = new HashMap<>();
     checkProperties.put(ServicePresenceCheck.NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME,"MyServiceOne, MyServiceTwo");
-    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"Atlas, OldService");
+    checkProperties.put(ServicePresenceCheck.REPLACED_SERVICES_PROPERTY_NAME,"Atlas, OldService");
     checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"Atlas2, NewService");
+    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"RemovedService");
 
     PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
     Mockito.when(prerequisiteCheckConfig.getCheckProperties(
@@ -170,7 +199,7 @@ public class ServicePresenceCheckTest {
 
     Map<String, String> checkProperties = new HashMap<>();
     checkProperties.put(ServicePresenceCheck.NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME,"Atlas, MyService");
-    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"OldService");
+    checkProperties.put(ServicePresenceCheck.REPLACED_SERVICES_PROPERTY_NAME,"OldService");
     checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"NewService");
 
     PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
@@ -200,7 +229,7 @@ public class ServicePresenceCheckTest {
 
     Map<String, String> checkProperties = new HashMap<>();
     checkProperties.put(ServicePresenceCheck.NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME,"Atlas, HDFS");
-    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"Storm, Ranger");
+    checkProperties.put(ServicePresenceCheck.REPLACED_SERVICES_PROPERTY_NAME,"Storm, Ranger");
     checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"Storm2, Ranger2");
 
     PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);


[02/41] ambari git commit: AMBARI-20676.User should be able to visualize inherited properties while submitting the workflow .(M Madhan Mohan Reddy via padmapriyanitt)

Posted by ao...@apache.org.
AMBARI-20676.User should be able to visualize inherited properties while submitting the workflow
.(M Madhan Mohan Reddy via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8c58e67b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8c58e67b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8c58e67b

Branch: refs/heads/branch-3.0-perf
Commit: 8c58e67be8c3273e2b158e157fafaaa94e3f3190
Parents: c7021a6
Author: padmapriyanitt <pa...@gmail.com>
Authored: Thu Apr 6 12:12:24 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../resources/ui/app/components/job-config.js   | 23 +++++++++-------
 .../src/main/resources/ui/app/routes/index.js   |  5 +++-
 .../ui/app/services/workflow-manager-configs.js | 27 +++++++++++++++++++
 .../services/workflow-manager-configs-test.js   | 28 ++++++++++++++++++++
 4 files changed, 72 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8c58e67b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
index 6aed9da..326cf38 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
@@ -35,6 +35,7 @@ const Validations = buildValidations({
 
 
 export default Ember.Component.extend(Validations, {
+  workflowManagerConfigs : Ember.inject.service('workflow-manager-configs'),
   systemConfigs : Ember.A([]),
   showingFileBrowser : false,
   overwritePath : false,
@@ -118,10 +119,10 @@ export default Ember.Component.extend(Validations, {
     var jobProperties = [];
     var jobParams = this.get("jobConfigs").params, self = this;
     this.get("jobProps").forEach(function(value) {
-      if (value!== Constants.defaultNameNodeValue && value!==Constants.rmDefaultValue){
-        var propName = value.trim().substring(2, value.length-1);
-        var isRequired = true;
-        var val = null;
+      var propName = value.trim().substring(2, value.length-1);
+      var isRequired = true;
+      var val = null;
+      if (value!== Constants.defaultNameNodeValue && value!==Constants.rmDefaultValue) {
         if(jobParams && jobParams.configuration && jobParams.configuration.property){
           var param = jobParams.configuration.property.findBy('name', propName);
           if(param && param.value){
@@ -138,13 +139,15 @@ export default Ember.Component.extend(Validations, {
             val = propVal.value
           }
         }
-        var prop= Ember.Object.create({
-          name: propName,
-          value: val,
-          isRequired : isRequired
-        });
-        jobProperties.push(prop);
+      } else {
+        val = self.get("workflowManagerConfigs").getWfmConfigs()[propName];
       }
+      var prop= Ember.Object.create({
+        name: propName,
+        value: val,
+        isRequired : isRequired
+      });
+      jobProperties.push(prop);
     });
     return jobProperties;
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/8c58e67b/contrib/views/wfmanager/src/main/resources/ui/app/routes/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/routes/index.js b/contrib/views/wfmanager/src/main/resources/ui/app/routes/index.js
index 6d94dfe..8e7f5d4 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/routes/index.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/routes/index.js
@@ -18,10 +18,13 @@
 import Ember from 'ember';
 
 export default Ember.Route.extend({
+    workflowManagerConfigs : Ember.inject.service('workflow-manager-configs'),
     afterModel(){
       let workflowManagerConfigsPromise = this.getWorkflowManagerConfigs();
       workflowManagerConfigsPromise.then(function(data){
-        this.invokeServiceChecksPromises(JSON.parse(data));
+        var jsonData = JSON.parse(data);
+        this.get('workflowManagerConfigs').setWfmConfigs(jsonData);
+        this.invokeServiceChecksPromises(jsonData);
       }.bind(this)).catch(function(errors){
         this.controllerFor('index').set('errors', errors);
       }.bind(this));

http://git-wip-us.apache.org/repos/asf/ambari/blob/8c58e67b/contrib/views/wfmanager/src/main/resources/ui/app/services/workflow-manager-configs.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/services/workflow-manager-configs.js b/contrib/views/wfmanager/src/main/resources/ui/app/services/workflow-manager-configs.js
new file mode 100644
index 0000000..fdd7d99
--- /dev/null
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/services/workflow-manager-configs.js
@@ -0,0 +1,27 @@
+/*
+*    Licensed to the Apache Software Foundation (ASF) under one or more
+*    contributor license agreements.  See the NOTICE file distributed with
+*    this work for additional information regarding copyright ownership.
+*    The ASF licenses this file to You under the Apache License, Version 2.0
+*    (the "License"); you may not use this file except in compliance with
+*    the License.  You may obtain a copy of the License at
+*
+*        http://www.apache.org/licenses/LICENSE-2.0
+*
+*    Unless required by applicable law or agreed to in writing, software
+*    distributed under the License is distributed on an "AS IS" BASIS,
+*    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+*    See the License for the specific language governing permissions and
+*    limitations under the License.
+*/
+import Ember from 'ember';
+
+export default Ember.Service.extend({
+  wfmConfigs: null,
+	setWfmConfigs(wfmConfigs){
+    this.set("wfmConfigs", wfmConfigs);
+	},
+	getWfmConfigs(){
+    return this.get("wfmConfigs");
+	}
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/8c58e67b/contrib/views/wfmanager/src/main/resources/ui/tests/unit/services/workflow-manager-configs-test.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/tests/unit/services/workflow-manager-configs-test.js b/contrib/views/wfmanager/src/main/resources/ui/tests/unit/services/workflow-manager-configs-test.js
new file mode 100644
index 0000000..a25fdc8
--- /dev/null
+++ b/contrib/views/wfmanager/src/main/resources/ui/tests/unit/services/workflow-manager-configs-test.js
@@ -0,0 +1,28 @@
+/*
+ *    Licensed to the Apache Software Foundation (ASF) under one or more
+ *    contributor license agreements.  See the NOTICE file distributed with
+ *    this work for additional information regarding copyright ownership.
+ *    The ASF licenses this file to You under the Apache License, Version 2.0
+ *    (the "License"); you may not use this file except in compliance with
+ *    the License.  You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *    Unless required by applicable law or agreed to in writing, software
+ *    distributed under the License is distributed on an "AS IS" BASIS,
+ *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *    See the License for the specific language governing permissions and
+ *    limitations under the License.
+ */
+import { moduleFor, test } from 'ember-qunit';
+
+moduleFor('service:workflow-manager-configs', 'Unit | Service | workflow manager configs', {
+  // Specify the other units that are required for this test.
+  // needs: ['service:foo']
+});
+
+// Replace this with your real tests.
+test('it exists', function(assert) {
+  let service = this.subject();
+  assert.ok(service);
+});


[35/41] ambari git commit: AMBARI-20651 - When in Upgrade paused state, links should be disabled for less privileged user (rzang)

Posted by ao...@apache.org.
AMBARI-20651 - When in Upgrade paused state, links should be disabled for less privileged user (rzang)

Change-Id: I4f6460680f420a5664923136756aaa1e9cb9af09


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8a2dfa4e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8a2dfa4e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8a2dfa4e

Branch: refs/heads/branch-3.0-perf
Commit: 8a2dfa4e74784be9c8b11450589573708b17b276
Parents: 259dc90
Author: Richard Zang <rz...@apache.org>
Authored: Tue Apr 4 15:19:12 2017 -0700
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/controllers/application.js | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8a2dfa4e/ambari-web/app/controllers/application.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/application.js b/ambari-web/app/controllers/application.js
index 580f337..d813d89 100644
--- a/ambari-web/app/controllers/application.js
+++ b/ambari-web/app/controllers/application.js
@@ -83,24 +83,24 @@ App.ApplicationController = Em.Controller.extend(App.Persist, {
     var upgradeSuspended = App.get('upgradeSuspended');
     var isDowngrade = App.router.get('mainAdminStackAndUpgradeController.isDowngrade');
     var typeSuffix = isDowngrade ? 'downgrade' : 'upgrade';
-
+    var hasUpgradePrivilege = App.isAuthorized('CLUSTER.UPGRADE_DOWNGRADE_STACK');
     if (upgradeInProgress) {
       return {
-        cls: 'upgrade-in-progress',
+        cls: hasUpgradePrivilege? 'upgrade-in-progress' : 'upgrade-in-progress not-allowed-cursor',
         icon: 'glyphicon-cog',
         msg: Em.I18n.t('admin.stackVersions.version.' + typeSuffix + '.running')
       }
     }
     if (upgradeHolding) {
       return {
-        cls: 'upgrade-holding',
+        cls: hasUpgradePrivilege? 'upgrade-holding' : 'upgrade-holding not-allowed-cursor',
         icon: 'glyphicon-pause',
         msg: Em.I18n.t('admin.stackVersions.version.' + typeSuffix + '.pause')
       }
     }
     if (upgradeSuspended) {
       return {
-        cls: 'upgrade-aborted',
+        cls: hasUpgradePrivilege? 'upgrade-aborted' : 'upgrade-aborted not-allowed-cursor',
         icon: 'glyphicon-pause',
         msg: Em.I18n.t('admin.stackVersions.version.' + typeSuffix + '.suspended')
       }


[14/41] ambari git commit: AMBARI-20625. Hadoop QA javac warnings increase check is broken -- fix for ambari-server

Posted by ao...@apache.org.
AMBARI-20625. Hadoop QA javac warnings increase check is broken -- fix for ambari-server


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/50a41a84
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/50a41a84
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/50a41a84

Branch: refs/heads/branch-3.0-perf
Commit: 50a41a8426686d7a904fb15926c5a4b95d335c17
Parents: 1352fa9
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Thu Mar 30 15:24:55 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 dev-support/test-patch.sh | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/50a41a84/dev-support/test-patch.sh
----------------------------------------------------------------------
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 294c840..9b6d83e 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -261,8 +261,8 @@ prebuildWithoutPatch () {
   echo ""
   echo ""
   echo "Compiling $(pwd)"
-  echo "$MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
-  $MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
+  echo "$MVN clean test -DskipTests -Dxlint=all -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
+  $MVN clean test -DskipTests -Dxlint=all -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     echo "Top-level trunk compilation is broken?"
     JIRA_COMMENT="$JIRA_COMMENT
@@ -385,8 +385,8 @@ checkJavacWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
-  $MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavacWarnings.txt 2>&1
+  echo "$MVN clean test -DskipTests -Dxlint=all -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
+  $MVN clean test -DskipTests -Dxlint=all -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 


[16/41] ambari git commit: AMBARI-19149. Code cleanup: String concatenation in StringBuilder.append()

Posted by ao...@apache.org.
AMBARI-19149. Code cleanup: String concatenation in StringBuilder.append()


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/038f637e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/038f637e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/038f637e

Branch: refs/heads/branch-3.0-perf
Commit: 038f637ec16b41ea726a58211bb4cf672d234570
Parents: 1b663da
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Mon Apr 3 15:25:41 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../ambari/server/actionmanager/Stage.java      | 22 ++++++++++----------
 .../server/configuration/Configuration.java     |  2 +-
 .../ambari/server/controller/ActionRequest.java | 19 +++++++++--------
 .../server/controller/ClusterResponse.java      | 17 +++++++--------
 .../server/controller/ExecuteActionRequest.java | 16 +++++++-------
 .../server/controller/ServiceRequest.java       | 10 ++++-----
 .../server/controller/ShortTaskStatus.java      | 19 ++++++++---------
 .../ambari/server/controller/UserRequest.java   |  2 +-
 .../customactions/ActionDefinitionManager.java  |  4 ++--
 .../server/orm/helpers/dbms/PostgresHelper.java |  2 +-
 .../server/stageplanner/RoleGraphNode.java      |  2 +-
 .../apache/ambari/server/state/HostConfig.java  |  4 ++--
 .../server/state/ServiceComponentImpl.java      | 13 +++++++-----
 .../apache/ambari/server/state/ServiceImpl.java | 10 +++++----
 .../server/topology/BlueprintValidatorImpl.java |  2 +-
 .../ganglia/GangliaPropertyProviderTest.java    |  6 +++---
 .../RangerWebAlertConfigActionTest.java         |  2 +-
 17 files changed, 77 insertions(+), 75 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
index cca279b..a46e6a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
@@ -930,19 +930,19 @@ public class Stage {
   public synchronized String toString() {
     StringBuilder builder = new StringBuilder();
     builder.append("STAGE DESCRIPTION BEGIN\n");
-    builder.append("requestId="+requestId+"\n");
-    builder.append("stageId="+stageId+"\n");
-    builder.append("clusterName="+clusterName+"\n");
-    builder.append("logDir=" + logDir+"\n");
-    builder.append("requestContext="+requestContext+"\n");
-    builder.append("clusterHostInfo="+clusterHostInfo+"\n");
-    builder.append("commandParamsStage="+commandParamsStage+"\n");
-    builder.append("hostParamsStage="+hostParamsStage+"\n");
-    builder.append("status="+status+"\n");
-    builder.append("displayStatus="+displayStatus+"\n");
+    builder.append("requestId=").append(requestId).append("\n");
+    builder.append("stageId=").append(stageId).append("\n");
+    builder.append("clusterName=").append(clusterName).append("\n");
+    builder.append("logDir=").append(logDir).append("\n");
+    builder.append("requestContext=").append(requestContext).append("\n");
+    builder.append("clusterHostInfo=").append(clusterHostInfo).append("\n");
+    builder.append("commandParamsStage=").append(commandParamsStage).append("\n");
+    builder.append("hostParamsStage=").append(hostParamsStage).append("\n");
+    builder.append("status=").append(status).append("\n");
+    builder.append("displayStatus=").append(displayStatus).append("\n");
     builder.append("Success Factors:\n");
     for (Role r : successFactors.keySet()) {
-      builder.append("  role: "+r+", factor: "+successFactors.get(r)+"\n");
+      builder.append("  role: ").append(r).append(", factor: ").append(successFactors.get(r)).append("\n");
     }
     for (HostRoleCommand hostRoleCommand : getOrderedHostRoleCommands()) {
       builder.append("HOST: ").append(hostRoleCommand.getHostName()).append(" :\n");

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 537b993..a8bfbf0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -5673,7 +5673,7 @@ public class Configuration {
     // now write out specific groupings
     StringBuilder baselineBuffer = new StringBuilder(1024);
     for( ConfigurationGrouping grouping : ConfigurationGrouping.values() ){
-      baselineBuffer.append("#### " + grouping);
+      baselineBuffer.append("#### ").append(grouping);
       baselineBuffer.append(System.lineSeparator());
       baselineBuffer.append("| Property Name | ");
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionRequest.java
index 6fdac2f..7a4fd36 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionRequest.java
@@ -121,14 +121,15 @@ public class ActionRequest {
 
   @Override
   public String toString() {
-    return (new StringBuilder()).
-        append("actionName :" + actionName).
-        append(", actionType :" + actionType).
-        append(", inputs :" + inputs).
-        append(", targetService :" + targetService).
-        append(", targetComponent :" + targetComponent).
-        append(", description :" + description).
-        append(", targetType :" + targetType).
-        append(", defaultTimeout :" + defaultTimeout).toString();
+    return new StringBuilder()
+      .append("actionName :").append(actionName)
+      .append(", actionType :").append(actionType)
+      .append(", inputs :").append(inputs)
+      .append(", targetService :").append(targetService)
+      .append(", targetComponent :").append(targetComponent)
+      .append(", description :").append(description)
+      .append(", targetType :").append(targetType)
+      .append(", defaultTimeout :").append(defaultTimeout)
+      .toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
index b3a2def..b89bd45 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
@@ -135,13 +135,12 @@ public class ClusterResponse {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("{"
-        + " clusterName=" + clusterName
-        + ", clusterId=" + clusterId
-        + ", provisioningState=" + provisioningState
-        + ", desiredStackVersion=" + desiredStackVersion
-        + ", totalHosts=" + totalHosts
-        + ", hosts=[");
+    sb.append("{ clusterName=").append(clusterName)
+      .append(", clusterId=").append(clusterId)
+      .append(", provisioningState=").append(provisioningState)
+      .append(", desiredStackVersion=").append(desiredStackVersion)
+      .append(", totalHosts=").append(totalHosts)
+      .append(", hosts=[");
 
     if (hostNames != null) {
       int i = 0;
@@ -153,9 +152,7 @@ public class ClusterResponse {
         sb.append(hostName);
       }
     }
-    sb.append("]"
-        + ", clusterHealthReport= " + clusterHealthReport
-        + "}");
+    sb.append("], clusterHealthReport= ").append(clusterHealthReport).append("}");
     return sb.toString();
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
index 351974a..a659f96 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
@@ -107,13 +107,13 @@ public class ExecuteActionRequest {
 
   @Override
   public synchronized String toString() {
-    return (new StringBuilder()).
-        append("isCommand :" + isCommand().toString()).
-        append(", action :" + actionName).
-        append(", command :" + commandName).
-        append(", inputs :" + parameters.toString()).
-        append(", resourceFilters: " + resourceFilters).
-        append(", exclusive: " + exclusive).
-        append(", clusterName :" + clusterName).toString();
+    return new StringBuilder()
+      .append("isCommand :").append(isCommand())
+      .append(", action :").append(actionName)
+      .append(", command :").append(commandName)
+      .append(", inputs :").append(parameters)
+      .append(", resourceFilters: ").append(resourceFilters)
+      .append(", exclusive: ").append(exclusive)
+      .append(", clusterName :").append(clusterName).toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java
index a8e6315..6c0d4ea 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java
@@ -132,11 +132,11 @@ public class ServiceRequest {
 
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("clusterName=" + clusterName
-        + ", serviceName=" + serviceName
-        + ", desiredState=" + desiredState
-        + ", credentialStoreEnabled=" + credentialStoreEnabled
-        + ", credentialStoreSupported=" + credentialStoreSupported);
+    sb.append("clusterName=").append(clusterName)
+      .append(", serviceName=").append(serviceName)
+      .append(", desiredState=").append(desiredState)
+      .append(", credentialStoreEnabled=").append(credentialStoreEnabled)
+      .append(", credentialStoreSupported=").append(credentialStoreSupported);
     return sb.toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/controller/ShortTaskStatus.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ShortTaskStatus.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ShortTaskStatus.java
index 975476f..df40aa8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ShortTaskStatus.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ShortTaskStatus.java
@@ -144,16 +144,15 @@ public class ShortTaskStatus {
 
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("ShortTaskStatusDump "
-        + ", stageId=" + stageId
-        + ", taskId=" + taskId
-        + ", hostname=" + hostName
-        + ", role=" + role
-        + ", command=" + command
-        + ", status=" + status
-        + ", outputLog=" + outputLog
-        + ", errorLog=" + errorLog
-    );
+    sb.append("ShortTaskStatusDump ")
+      .append(", stageId=").append(stageId)
+      .append(", taskId=").append(taskId)
+      .append(", hostname=").append(hostName)
+      .append(", role=").append(role)
+      .append(", command=").append(command)
+      .append(", status=").append(status)
+      .append(", outputLog=").append(outputLog)
+      .append(", errorLog=").append(errorLog);
     return sb.toString();
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/controller/UserRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/UserRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/UserRequest.java
index 282131a..c25fffe 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/UserRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/UserRequest.java
@@ -70,7 +70,7 @@ public class UserRequest {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("User, username=" + userName);
+    sb.append("User, username=").append(userName);
     return sb.toString();
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java b/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java
index 7a2738b..05af097 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/customactions/ActionDefinitionManager.java
@@ -79,7 +79,7 @@ public class ActionDefinitionManager {
     try {
       return Enum.valueOf(enumm, s);
     } catch (IllegalArgumentException iaex) {
-      reason.append("Invalid value provided for " + enumm.getName());
+      reason.append("Invalid value provided for ").append(enumm.getName());
       return null;
     }
   }
@@ -147,7 +147,7 @@ public class ActionDefinitionManager {
       }
 
       if (actionType == null || actionType == ActionType.SYSTEM_DISABLED) {
-        reason.append("Action type cannot be " + actionType);
+        reason.append("Action type cannot be ").append(actionType);
         return false;
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/PostgresHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/PostgresHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/PostgresHelper.java
index ad18428..2237f86 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/PostgresHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/PostgresHelper.java
@@ -75,6 +75,6 @@ public class PostgresHelper extends GenericDbmsHelper {
 
   @Override
   public  StringBuilder writeDropPrimaryKeyStatement(StringBuilder builder, String constraintName, boolean cascade){
-      return builder.append("DROP CONSTRAINT ").append(constraintName + (cascade ? " CASCADE" : ""));
+      return builder.append("DROP CONSTRAINT ").append(constraintName).append(cascade ? " CASCADE" : "");
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraphNode.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraphNode.java b/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraphNode.java
index d2aac3c..4e305bd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraphNode.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraphNode.java
@@ -72,7 +72,7 @@ public class RoleGraphNode {
   @Override
   public String toString() {
     StringBuilder builder = new StringBuilder();
-    builder.append("("+role+", "+command +", "+inDegree+")");
+    builder.append("(").append(role).append(", ").append(command).append(", ").append(inDegree).append(")");
     return builder.toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/state/HostConfig.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/HostConfig.java b/ambari-server/src/main/java/org/apache/ambari/server/state/HostConfig.java
index f95d843..5395f4c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/HostConfig.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/HostConfig.java
@@ -62,7 +62,7 @@ public class HostConfig {
     StringBuilder sb = new StringBuilder();
     sb.append("{");
     if (defaultVersionTag != null) {
-      sb.append("default = " + defaultVersionTag);
+      sb.append("default = ").append(defaultVersionTag);
     }
     if (!configGroupOverrides.isEmpty()) {
       sb.append(", overrides = [ ");
@@ -71,7 +71,7 @@ public class HostConfig {
         if (i++ != 0) {
           sb.append(", ");
         }
-        sb.append(entry.getKey().toString() + " : " + entry.getValue());
+        sb.append(entry.getKey()).append(" : ").append(entry.getValue());
       }
       sb.append("]");
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index 515cc63..f6ddc6d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@ -439,11 +439,14 @@ public class ServiceComponentImpl implements ServiceComponent {
 
   @Override
   public void debugDump(StringBuilder sb) {
-    sb.append("ServiceComponent={ serviceComponentName=" + getName() + ", recoveryEnabled="
-        + isRecoveryEnabled() + ", clusterName=" + service.getCluster().getClusterName()
-        + ", clusterId=" + service.getCluster().getClusterId() + ", serviceName="
-        + service.getName() + ", desiredStackVersion=" + getDesiredStackVersion()
-        + ", desiredState=" + getDesiredState().toString() + ", hostcomponents=[ ");
+    sb.append("ServiceComponent={ serviceComponentName=").append(getName())
+      .append(", recoveryEnabled=").append(isRecoveryEnabled())
+      .append(", clusterName=").append(service.getCluster().getClusterName())
+      .append(", clusterId=").append(service.getCluster().getClusterId())
+      .append(", serviceName=").append(service.getName())
+      .append(", desiredStackVersion=").append(getDesiredStackVersion())
+      .append(", desiredState=").append(getDesiredState())
+      .append(", hostcomponents=[ ");
     boolean first = true;
     for (ServiceComponentHost sch : hostComponents.values()) {
       if (!first) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
index 9caff6a..a0c0db1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
@@ -423,10 +423,12 @@ public class ServiceImpl implements Service {
 
   @Override
   public void debugDump(StringBuilder sb) {
-    sb.append("Service={ serviceName=" + getName() + ", clusterName=" + cluster.getClusterName()
-        + ", clusterId=" + cluster.getClusterId() + ", desiredStackVersion="
-        + getDesiredStackVersion() + ", desiredState=" + getDesiredState().toString()
-        + ", components=[ ");
+    sb.append("Service={ serviceName=").append(getName())
+      .append(", clusterName=").append(cluster.getClusterName())
+      .append(", clusterId=").append(cluster.getClusterId())
+      .append(", desiredStackVersion=").append(getDesiredStackVersion())
+      .append(", desiredState=").append(getDesiredState())
+      .append(", components=[ ");
     boolean first = true;
     for (ServiceComponent sc : components.values()) {
       if (!first) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
index f994457..9688c60 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java
@@ -104,7 +104,7 @@ public class BlueprintValidatorImpl implements BlueprintValidator {
             String propertyValue = propertyEntry.getValue();
             if (propertyValue != null) {
               if (SecretReference.isSecret(propertyValue)) {
-                errorMessage.append("  Config:" + configType + " Property:" + propertyName+"\n");
+                errorMessage.append("  Config:").append(configType).append(" Property:").append(propertyName).append("\n");
                 containsSecretReferences = true;
               }
             }

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProviderTest.java
index 5ef7117..62b7d8a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProviderTest.java
@@ -474,9 +474,9 @@ public class GangliaPropertyProviderTest {
       resources.add(resource);
       
       if (hostsList.length() != 0)
-        hostsList.append("," + "host" + i );
+        hostsList.append(",host").append(i);
       else
-        hostsList.append("host" + i); 
+        hostsList.append("host").append(i);
     }
 
     // only ask for one property
@@ -945,7 +945,7 @@ public class GangliaPropertyProviderTest {
       for (String metricRegex: metricsRegexes)
       {
         if (entry.getKey().startsWith(metricRegex)) {
-          metricsBuilder.append(entry.getValue().getPropertyId() + ",");
+          metricsBuilder.append(entry.getValue().getPropertyId()).append(",");
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/038f637e/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerWebAlertConfigActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerWebAlertConfigActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerWebAlertConfigActionTest.java
index 5d2e605..b4edb41 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerWebAlertConfigActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerWebAlertConfigActionTest.java
@@ -117,7 +117,7 @@ public class RangerWebAlertConfigActionTest {
 
       try {
         while (scanner.hasNextLine()) {
-          rangerAlertsConfigFile.append(scanner.nextLine() + lineSeparator);
+          rangerAlertsConfigFile.append(scanner.nextLine()).append(lineSeparator);
         }
         Mockito.when(alertDefinitionEntity.getSource()).thenReturn(rangerAlertsConfigFile.toString());
       } finally {


[04/41] ambari git commit: AMBARI-20668 Component identities is not updated in Add Service Wizard (akovalenko)

Posted by ao...@apache.org.
AMBARI-20668 Component identities is not updated in Add Service Wizard (akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/14d2581d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/14d2581d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/14d2581d

Branch: refs/heads/branch-3.0-perf
Commit: 14d2581d8423db699a283c7cfd26dbce9db8b628
Parents: 112f7d4
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Tue Apr 4 14:50:40 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/mixins/wizard/addSecurityConfigs.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/14d2581d/ambari-web/app/mixins/wizard/addSecurityConfigs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/wizard/addSecurityConfigs.js b/ambari-web/app/mixins/wizard/addSecurityConfigs.js
index 2e6d3bc..e8da47f 100644
--- a/ambari-web/app/mixins/wizard/addSecurityConfigs.js
+++ b/ambari-web/app/mixins/wizard/addSecurityConfigs.js
@@ -390,7 +390,7 @@ App.AddSecurityConfigs = Em.Mixin.create({
         var prop = identity[item];
 
         // compare ui rendered config against identity with `configuration attribute` (Most of the identities have `configuration attribute`)
-        var isIdentityWithConfig =  (prop.configuration && prop.configuration.split('/')[0] === config.filename && prop.configuration.split('/')[1] === config.name);
+        var isIdentityWithConfig = (prop.configuration && prop.configuration.split('/')[0] === App.config.getConfigTagFromFileName(config.filename) && prop.configuration.split('/')[1] === config.name);
 
         // compare ui rendered config against identity without `configuration attribute` (For example spnego principal and keytab)
         var isIdentityWithoutConfig = (!prop.configuration && identity.name === config.name.split('_')[0] && item === config.name.split('_')[1]);


[09/41] ambari git commit: AMBARI-20571. Remove property atlas.cluster.name from hive-site during HDP stack upgrades (aonishuk)

Posted by ao...@apache.org.
AMBARI-20571. Remove property atlas.cluster.name from hive-site during HDP stack upgrades (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/97338089
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/97338089
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/97338089

Branch: refs/heads/branch-3.0-perf
Commit: 97338089198e62a558cadc2b2c2414ba424d9346
Parents: 5c91e40
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Apr 3 16:18:23 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml  | 6 ++++++
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml         | 6 ++++++
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml         | 6 ++++++
 .../src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml | 1 +
 .../src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml | 1 +
 .../main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml  | 5 +++++
 .../stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml         | 6 ++++++
 .../stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml         | 6 ++++++
 .../src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml | 1 +
 .../src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml | 1 +
 .../main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml  | 6 ++++++
 .../stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml         | 6 ++++++
 .../stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml         | 6 ++++++
 .../src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml | 6 ++++++
 .../src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml | 1 +
 15 files changed, 64 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index b1bbb32..63cf5f5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -204,6 +204,12 @@
             <type>hive-site</type>
             <set key="hive.enforce.bucketing" value="true"/>
           </definition>
+
+          <definition xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name">
+            <type>hive-site</type>
+            <transfer operation="delete" delete-key="atlas.cluster.name"/>
+          </definition>
+
         </changes>
       </component>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
index adccac0..73a4f5e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
@@ -393,6 +393,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Updating Hive properties">
+        <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name">
+          <summary>Removing atlas.cluster.name property</summary>
+        </task>
+      </execute-stage>
+
       <!--OOZIE-->
       <execute-stage service="OOZIE" component="OOZIE_SERVER" title="Apply config changes for Oozie Server">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.OozieConfigCalculation">

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index b5dce2d..3a7df7d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -472,6 +472,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Updating Hive properties">
+        <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name">
+          <summary>Removing atlas.cluster.name property</summary>
+        </task>
+      </execute-stage>
+
       <!--OOZIE-->
       <execute-stage service="OOZIE" component="OOZIE_SERVER" title="Apply config changes for Oozie Server">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.OozieConfigCalculation">

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
index 9396f80..ca612b8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
@@ -860,6 +860,7 @@
           <task xsi:type="configure" id="hdp_2_4_0_0_hive_server_configure_authentication"/>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hive_audit_db" />
           <task xsi:type="configure" id="hive_log4j_parameterize" />
+          <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name" />
 
           <!-- Remove Atlas configs that were incorrectly added to hive-site instead of Atlas' application.properties. -->
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_hive_atlas_configs" />

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index 7917343..1fbf81e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -891,6 +891,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_hive_server_configure_authentication"/>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hive_audit_db" />
+          <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name" />
 
           <!-- Remove Atlas configs that were incorrectly added to hive-site instead of Atlas' application.properties. -->
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_hive_atlas_configs" />

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 48a2d96..2345d08 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -106,6 +106,11 @@
             <type>hive-site</type>
             <set key="hive.enforce.bucketing" value="true"/>
           </definition>
+
+          <definition xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name">
+            <type>hive-site</type>
+            <transfer operation="delete" delete-key="atlas.cluster.name"/>
+          </definition>
          </changes>
       </component>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
index cd63651..bb6506e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
@@ -419,6 +419,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Updating Hive properties">
+        <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name">
+          <summary>Removing atlas.cluster.name property</summary>
+        </task>
+      </execute-stage>
+
      <!-- HBASE -->
       <execute-stage service="HBASE" component="HBASE_MASTER" title="Apply config changes for Hbase Master">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hbase_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index df05749..c6c3a0d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -513,6 +513,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Updating Hive properties">
+        <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name">
+          <summary>Removing atlas.cluster.name property</summary>
+        </task>
+      </execute-stage>
+
       <!-- HBASE -->
       <execute-stage service="HBASE" component="HBASE_MASTER" title="Apply config changes for Hbase Master">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hbase_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
index a642320..4654ac8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
@@ -861,6 +861,7 @@
             <summary>Update hive-env content</summary>
           </task>
           <task xsi:type="configure" id="hive_log4j_parameterize" />
+          <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name" />
         </pre-upgrade>
 
         <pre-downgrade/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index ee8e10e..ec7d8b8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -899,6 +899,7 @@
           <task xsi:type="configure" id="hive_log4j_parameterize" />
 
           <task xsi:type="configure" id="hdp_2_6_0_0_hive_set_hive_enforce_bucketing_property" />
+          <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name" />
 
         </pre-upgrade>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index 090e66f..b70b9df 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -432,6 +432,7 @@
             <regex-replace key="content" find="property.llap.daemon.log.maxbackupindex = ([0-9]+)" replace-with="property.llap.daemon.log.maxbackupindex = {{hive_llap_log_maxbackupindex}}"/>
           </definition>
 
+
           <definition xsi:type="configure" id="hdp_2_6_0_0_hive_set_hive_enforce_bucketing_property">
             <type>hive-site</type>
             <set key="hive.enforce.bucketing" value="true"/>
@@ -481,6 +482,11 @@
             <transfer operation="copy" from-type="hive-site" from-key="hive.tez.container.size" to-key="hive.tez.container.size" default-value="682"  if-type="hive-interactive-site" if-key="hive.tez.container.size" if-key-state="absent"/>
           </definition>
 
+          <definition xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name">
+            <type>hive-site</type>
+            <transfer operation="delete" delete-key="atlas.cluster.name"/>
+          </definition>
+
         </changes>
 
       </component>

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
index 6f7fbd0..95ca3a2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
@@ -292,6 +292,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Updating Hive properties">
+        <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name">
+          <summary>Removing atlas.cluster.name property</summary>
+        </task>
+      </execute-stage>
+
       <!-- STORM -->
       <execute-stage service="STORM" component="NIMBUS" title="Apply config changes for Storm">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_empty_storm_topology_submission_notifier_plugin_class"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index c5024b6..05ecb2c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -569,6 +569,12 @@
           <summary>Updating the Hive Log4J2 properties to include parameterizations</summary>
         </task>
       </execute-stage>
+
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Updating Hive properties">
+        <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name">
+          <summary>Removing atlas.cluster.name property</summary>
+        </task>
+      </execute-stage>
       
       <!-- SPARK -->
       <execute-stage service="SPARK" component="LIVY_SERVER" title="Apply config changes for Livy Server">

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
index ab8e8b5..37f92fd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
@@ -729,6 +729,12 @@
       </component>
 
       <component name="HIVE_SERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name" />
+        </pre-upgrade>
+
+        <pre-downgrade />
+
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/97338089/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index e2482e5..0dab6f4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -813,6 +813,7 @@
           <task xsi:type="configure" id="hive_llap_log4j_parameterize" />
           <task xsi:type="configure" id="hdp_2_6_0_0_hive_set_hive_enforce_bucketing_property" />
           <task xsi:type="configure" id="hdp_2_6_0_0_copy_hive_tez_container_size_to_hiveInteractive" />
+          <task xsi:type="configure" id="hdp_2_5_0_0_remove_atlas_cluster_name" />
         </pre-upgrade>
         
         <pre-downgrade />


[37/41] ambari git commit: AMBARI-20548. Grafana dashboard changes for some new llap daemon metrics (vivekratnavel)

Posted by ao...@apache.org.
AMBARI-20548. Grafana dashboard changes for some new llap daemon metrics (vivekratnavel)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c7021a60
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c7021a60
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c7021a60

Branch: refs/heads/branch-3.0-perf
Commit: c7021a60432a79d37e6b1fbf5106b444edec56f6
Parents: 426e895
Author: Vivek Ratnavel Subramanian <vi...@gmail.com>
Authored: Wed Apr 5 15:43:26 2017 -0700
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../HDP/grafana-llapdaemon-daemons.json         | 313 ++++++++++++++-----
 1 file changed, 231 insertions(+), 82 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c7021a60/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-llapdaemon-daemons.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-llapdaemon-daemons.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-llapdaemon-daemons.json
index 2f69868..849f296 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-llapdaemon-daemons.json
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-llapdaemon-daemons.json
@@ -1189,7 +1189,7 @@
             "threshold2": null,
             "threshold2Color": "rgba(234, 112, 112, 0.22)"
           },
-          "id": 29,
+          "id": 14,
           "isNew": true,
           "legend": {
             "avg": false,
@@ -1214,21 +1214,21 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "max",
-              "alias": "95th Percentile",
+              "aggregator": "sum",
+              "alias": "MemHeapUsed",
               "app": "llapdaemon",
               "downsampleAggregator": "avg",
               "errors": {},
-              "metric": "io.IOMetrics.PercentileDecodingTime_30s95thPercentileLatency",
+              "metric": "jvm.JvmMetrics.MemHeapUsedM",
               "precision": "default",
-              "refId": "B",
+              "refId": "C",
               "templatedHost": "",
               "transform": "none"
             }
           ],
           "timeFrom": null,
           "timeShift": null,
-          "title": "95th Percentile Column Decoding Time (30s interval)",
+          "title": "Total JVM Heap Used",
           "tooltip": {
             "shared": false,
             "value_type": "cumulative"
@@ -1238,7 +1238,7 @@
           "x-axis": true,
           "y-axis": true,
           "y_formats": [
-            "ms",
+            "mbytes",
             "short"
           ]
         },
@@ -1261,7 +1261,7 @@
             "threshold2": null,
             "threshold2Color": "rgba(234, 112, 112, 0.22)"
           },
-          "id": 30,
+          "id": 15,
           "isNew": true,
           "legend": {
             "avg": false,
@@ -1286,21 +1286,21 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "max",
-              "alias": "Max",
+              "aggregator": "sum",
+              "alias": "MemNonHeapUsed",
               "app": "llapdaemon",
               "downsampleAggregator": "avg",
               "errors": {},
-              "metric": "io.IOMetrics.MaxDecodingTime",
+              "metric": "jvm.JvmMetrics.MemNonHeapUsedM",
               "precision": "default",
-              "refId": "A",
+              "refId": "C",
               "templatedHost": "",
               "transform": "none"
             }
           ],
           "timeFrom": null,
           "timeShift": null,
-          "title": "Max Column Decoding Time",
+          "title": "Total JVM Non-Heap Used",
           "tooltip": {
             "shared": false,
             "value_type": "cumulative"
@@ -1310,19 +1310,10 @@
           "x-axis": true,
           "y-axis": true,
           "y_formats": [
-            "ms",
+            "mbytes",
             "short"
           ]
-        }
-      ],
-      "showTitle": true,
-      "title": "IO Elevator Metrics"
-    },
-    {
-      "collapse": true,
-      "editable": true,
-      "height": "250px",
-      "panels": [
+        },
         {
           "aliasColors": {},
           "bars": false,
@@ -1342,7 +1333,7 @@
             "threshold2": null,
             "threshold2Color": "rgba(234, 112, 112, 0.22)"
           },
-          "id": 14,
+          "id": 12,
           "isNew": true,
           "legend": {
             "avg": false,
@@ -1367,21 +1358,21 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "sum",
-              "alias": "MemHeapUsed",
+              "aggregator": "max",
+              "alias": "GcTotalExtraSleepTime",
               "app": "llapdaemon",
               "downsampleAggregator": "avg",
               "errors": {},
-              "metric": "jvm.JvmMetrics.MemHeapUsedM",
+              "metric": "jvm.JvmMetrics.GcTotalExtraSleepTime",
               "precision": "default",
-              "refId": "C",
+              "refId": "A",
               "templatedHost": "",
-              "transform": "none"
+              "transform": "diff"
             }
           ],
           "timeFrom": null,
           "timeShift": null,
-          "title": "Total JVM Heap Used",
+          "title": "Max GcTotalExtraSleepTime",
           "tooltip": {
             "shared": false,
             "value_type": "cumulative"
@@ -1391,7 +1382,7 @@
           "x-axis": true,
           "y-axis": true,
           "y_formats": [
-            "mbytes",
+            "ms",
             "short"
           ]
         },
@@ -1408,19 +1399,22 @@
             "leftMin": 0,
             "rightLogBase": 1,
             "rightMax": null,
-            "rightMin": null,
+            "rightMin": 0,
             "threshold1": null,
             "threshold1Color": "rgba(216, 200, 27, 0.27)",
             "threshold2": null,
-            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+            "threshold2Color": "rgba(234, 112, 112, 0.22)",
+            "thresholdLine": false
           },
-          "id": 15,
+          "id": 13,
           "isNew": true,
+          "leftYAxisLabel": "",
           "legend": {
             "avg": false,
             "current": false,
             "max": false,
             "min": false,
+            "rightSide": false,
             "show": false,
             "total": false,
             "values": false
@@ -1433,27 +1427,29 @@
           "pointradius": 5,
           "points": false,
           "renderer": "flot",
+          "rightYAxisLabel": "",
           "seriesOverrides": [],
           "span": 6,
           "stack": false,
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "sum",
-              "alias": "MemNonHeapUsed",
+              "aggregator": "max",
+              "alias": "GcTimeMillis",
               "app": "llapdaemon",
               "downsampleAggregator": "avg",
               "errors": {},
-              "metric": "jvm.JvmMetrics.MemNonHeapUsedM",
+              "hide": false,
+              "metric": "jvm.JvmMetrics.GcTimeMillis",
               "precision": "default",
-              "refId": "C",
+              "refId": "A",
               "templatedHost": "",
-              "transform": "none"
+              "transform": "diff"
             }
           ],
           "timeFrom": null,
           "timeShift": null,
-          "title": "Total JVM Non-Heap Used",
+          "title": "Max GcTimeMillis",
           "tooltip": {
             "shared": false,
             "value_type": "cumulative"
@@ -1463,7 +1459,7 @@
           "x-axis": true,
           "y-axis": true,
           "y_formats": [
-            "mbytes",
+            "ms",
             "short"
           ]
         },
@@ -1486,7 +1482,7 @@
             "threshold2": null,
             "threshold2Color": "rgba(234, 112, 112, 0.22)"
           },
-          "id": 12,
+          "id": 43,
           "isNew": true,
           "legend": {
             "avg": false,
@@ -1512,20 +1508,20 @@
           "targets": [
             {
               "aggregator": "max",
-              "alias": "GcTotalExtraSleepTime",
+              "alias": "ThreadsWaiting",
               "app": "llapdaemon",
               "downsampleAggregator": "avg",
               "errors": {},
-              "metric": "jvm.JvmMetrics.GcTotalExtraSleepTime",
+              "metric": "jvm.JvmMetrics.ThreadsWaiting",
               "precision": "default",
-              "refId": "A",
+              "refId": "B",
               "templatedHost": "",
-              "transform": "diff"
+              "transform": "none"
             }
           ],
           "timeFrom": null,
           "timeShift": null,
-          "title": "Max GcTotalExtraSleepTime",
+          "title": "Max JVM Threads Waiting",
           "tooltip": {
             "shared": false,
             "value_type": "cumulative"
@@ -1535,7 +1531,7 @@
           "x-axis": true,
           "y-axis": true,
           "y_formats": [
-            "ms",
+            "short",
             "short"
           ]
         },
@@ -1552,22 +1548,19 @@
             "leftMin": 0,
             "rightLogBase": 1,
             "rightMax": null,
-            "rightMin": 0,
+            "rightMin": null,
             "threshold1": null,
             "threshold1Color": "rgba(216, 200, 27, 0.27)",
             "threshold2": null,
-            "threshold2Color": "rgba(234, 112, 112, 0.22)",
-            "thresholdLine": false
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
           },
-          "id": 13,
+          "id": 44,
           "isNew": true,
-          "leftYAxisLabel": "",
           "legend": {
             "avg": false,
             "current": false,
             "max": false,
             "min": false,
-            "rightSide": false,
             "show": false,
             "total": false,
             "values": false
@@ -1580,7 +1573,6 @@
           "pointradius": 5,
           "points": false,
           "renderer": "flot",
-          "rightYAxisLabel": "",
           "seriesOverrides": [],
           "span": 6,
           "stack": false,
@@ -1588,21 +1580,20 @@
           "targets": [
             {
               "aggregator": "max",
-              "alias": "GcTimeMillis",
+              "alias": "ThreadsTimedWaiting",
               "app": "llapdaemon",
               "downsampleAggregator": "avg",
               "errors": {},
-              "hide": false,
-              "metric": "jvm.JvmMetrics.GcTimeMillis",
+              "metric": "jvm.JvmMetrics.ThreadsTimedWaiting",
               "precision": "default",
-              "refId": "A",
+              "refId": "B",
               "templatedHost": "",
-              "transform": "diff"
+              "transform": "none"
             }
           ],
           "timeFrom": null,
           "timeShift": null,
-          "title": "Max GcTimeMillis",
+          "title": "Max JVM Threads Timed Waiting",
           "tooltip": {
             "shared": false,
             "value_type": "cumulative"
@@ -1612,7 +1603,7 @@
           "x-axis": true,
           "y-axis": true,
           "y_formats": [
-            "ms",
+            "short",
             "short"
           ]
         },
@@ -1759,6 +1750,89 @@
             "short",
             "short"
           ]
+        }
+      ],
+      "showTitle": true,
+      "title": "JVM Metrics"
+    },
+    {
+      "title": "OFFHEAP METRICS",
+      "height": "250px",
+      "editable": true,
+      "collapse": true,
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 47,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 1,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "DirectBufferMemoryUsed",
+              "app": "llapdaemon",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "jvm.LlapDaemonJVMMetrics.LlapDaemonDirectBufferMemoryUsed",
+              "precision": "default",
+              "refId": "C",
+              "templatedHost": "",
+              "transform": "none",
+              "seriesAggregator": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Direct Buffer Memory Used",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "transparent": true,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "short"
+          ]
         },
         {
           "aliasColors": {},
@@ -1779,7 +1853,7 @@
             "threshold2": null,
             "threshold2Color": "rgba(234, 112, 112, 0.22)"
           },
-          "id": 43,
+          "id": 46,
           "isNew": true,
           "legend": {
             "avg": false,
@@ -1804,21 +1878,22 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "max",
-              "alias": "ThreadsWaiting",
+              "aggregator": "none",
+              "alias": "DirectBufferTotalCapacity",
               "app": "llapdaemon",
               "downsampleAggregator": "avg",
               "errors": {},
-              "metric": "jvm.JvmMetrics.ThreadsWaiting",
+              "metric": "jvm.LlapDaemonJVMMetrics.LlapDaemonDirectBufferTotalCapacity",
               "precision": "default",
-              "refId": "B",
+              "refId": "C",
               "templatedHost": "",
-              "transform": "none"
+              "transform": "none",
+              "seriesAggregator": "none"
             }
           ],
           "timeFrom": null,
           "timeShift": null,
-          "title": "Max JVM Threads Waiting",
+          "title": "Direct Buffer Total Capacity",
           "tooltip": {
             "shared": false,
             "value_type": "cumulative"
@@ -1828,7 +1903,7 @@
           "x-axis": true,
           "y-axis": true,
           "y_formats": [
-            "short",
+            "bytes",
             "short"
           ]
         },
@@ -1851,7 +1926,7 @@
             "threshold2": null,
             "threshold2Color": "rgba(234, 112, 112, 0.22)"
           },
-          "id": 44,
+          "id": 48,
           "isNew": true,
           "legend": {
             "avg": false,
@@ -1876,21 +1951,22 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "max",
-              "alias": "ThreadsTimedWaiting",
+              "aggregator": "none",
+              "alias": "MappedBufferMemoryUsed",
               "app": "llapdaemon",
               "downsampleAggregator": "avg",
               "errors": {},
-              "metric": "jvm.JvmMetrics.ThreadsTimedWaiting",
+              "metric": "jvm.LlapDaemonJVMMetrics.LlapDaemonMappedBufferMemoryUsed",
               "precision": "default",
-              "refId": "B",
+              "refId": "C",
               "templatedHost": "",
-              "transform": "none"
+              "transform": "none",
+              "seriesAggregator": "none"
             }
           ],
           "timeFrom": null,
           "timeShift": null,
-          "title": "Max JVM Threads Timed Waiting",
+          "title": "Mapped Buffer Memory Used",
           "tooltip": {
             "shared": false,
             "value_type": "cumulative"
@@ -1900,13 +1976,85 @@
           "x-axis": true,
           "y-axis": true,
           "y_formats": [
-            "short",
+            "bytes",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 49,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 1,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "MappedBufferTotalCapacity",
+              "app": "llapdaemon",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "jvm.LlapDaemonJVMMetrics.LlapDaemonMappedBufferTotalCapacity",
+              "precision": "default",
+              "refId": "C",
+              "templatedHost": "",
+              "transform": "none",
+              "seriesAggregator": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Mapped Buffer Total Capacity",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "transparent": true,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
             "short"
           ]
         }
       ],
-      "showTitle": true,
-      "title": "JVM Metrics"
+      "showTitle": true
     }
   ],
   "time": {
@@ -1968,7 +2116,8 @@
         "allFormat": "glob",
         "current": {
           "text": "All",
-          "value": ""
+          "value": "",
+          "tags": []
         },
         "datasource": null,
         "includeAll": true,


[22/41] ambari git commit: AMBARI-20658.Click of 'NEW JOB' button doesn't create new worksheet tab. (Venkata Sairam via padmapriyanitt)

Posted by ao...@apache.org.
AMBARI-20658.Click of 'NEW JOB' button doesn't create new worksheet tab. (Venkata Sairam via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aeb6707f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aeb6707f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aeb6707f

Branch: refs/heads/branch-3.0-perf
Commit: aeb6707f61ae441e77ac443a9f3ce2e81fb208fb
Parents: caf6a25
Author: padmapriyanitt <pa...@gmail.com>
Authored: Tue Apr 4 12:06:36 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../hive20/src/main/resources/ui/app/routes/queries/new.js      | 5 ++++-
 .../ui/app/templates/components/top-application-bar.hbs         | 2 +-
 2 files changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aeb6707f/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
index b48b8df..76a7439 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
@@ -22,10 +22,13 @@ export default Ember.Route.extend({
   beforeModel() {
     let existingWorksheets = this.store.peekAll('worksheet');
     let newWorksheetName = 'worksheet';
-    if(!this.controllerFor("queries").worksheetCount) {
+    if(!this.controllerFor("queries").worksheetCount && !existingWorksheets.get("length")) {
       newWorksheetName = newWorksheetName + 1;
     } else {
       let id = parseInt(this.controllerFor("queries").worksheetCount);
+      if(!id){
+        id = existingWorksheets.get("length")+1;
+      }
       newWorksheetName = newWorksheetName + id;
     }
     let newWorksheetTitle = newWorksheetName.capitalize();

http://git-wip-us.apache.org/repos/asf/ambari/blob/aeb6707f/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
index f163dca..8cdb779 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
@@ -27,7 +27,7 @@
   {{#if (not (or service askPassword))}}
     <span class="pull-right">
       {{#link-to 'queries.new' class="btn btn-sm btn-success"}}{{fa-icon "plus"}} NEW JOB{{/link-to}}
-        {{#link-to 'databases.newtable' class="btn btn-sm btn-success"}}{{fa-icon "plus"}} NEW TABLE{{/link-to}}
+      {{#link-to 'databases.newtable' class="btn btn-sm btn-success"}}{{fa-icon "plus"}} NEW TABLE{{/link-to}}
     </span>
   {{/if}}
 </h3>


[36/41] ambari git commit: AMBARI-20629 Take Ambari Cluster Name as a property in Ranger plugin configs (mugdha)

Posted by ao...@apache.org.
AMBARI-20629 Take Ambari Cluster Name as a property in Ranger plugin configs (mugdha)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/112f7d4e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/112f7d4e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/112f7d4e

Branch: refs/heads/branch-3.0-perf
Commit: 112f7d4e79bd5d4e69d51cd95f7450981eaf18eb
Parents: be23039
Author: Mugdha Varadkar <mu...@apache.org>
Authored: Tue Apr 4 10:33:57 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../0.96.0.2.0/package/scripts/params_linux.py  |   3 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |   5 +-
 .../configuration/ranger-hdfs-audit.xml         |   9 ++
 .../3.0.0.3.0/package/scripts/params_linux.py   |   3 +
 .../configuration/ranger-hive-audit.xml         |   9 ++
 .../KAFKA/0.8.1/package/scripts/params.py       |   3 +
 .../0.5.0.2.2/package/scripts/params_linux.py   |   3 +
 .../0.5.0.2.3/package/scripts/params.py         |   5 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   3 +
 .../configuration/ranger-yarn-audit.xml         |   9 ++
 .../3.0.0.3.0/package/scripts/params_linux.py   |   3 +
 .../ATLAS/configuration/ranger-atlas-audit.xml  |  33 ++++++
 .../HBASE/configuration/ranger-hbase-audit.xml  |  33 ++++++
 .../HDFS/configuration/ranger-hdfs-audit.xml    |  33 ++++++
 .../HIVE/configuration/ranger-hive-audit.xml    |  33 ++++++
 .../KAFKA/configuration/ranger-kafka-audit.xml  |  33 ++++++
 .../KNOX/configuration/ranger-knox-audit.xml    |  33 ++++++
 .../configuration/ranger-kms-audit.xml          |  33 ++++++
 .../STORM/configuration/ranger-storm-audit.xml  |  33 ++++++
 .../YARN/configuration/ranger-yarn-audit.xml    |  33 ++++++
 .../stacks/HDP/2.6/upgrades/config-upgrade.xml  | 108 +++++++++++++++++++
 .../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml |  53 ++++++++-
 .../stacks/HDP/2.6/upgrades/upgrade-2.6.xml     |  33 ++++++
 23 files changed, 540 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index e78bfc2..3177643 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -408,6 +408,9 @@ if enable_ranger_hbase:
   if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor.lower() == 'sqla':
     xa_audit_db_is_enabled = False
 
+# need this to capture cluster name from where ranger hbase plugin is enabled
+cluster_name = config['clusterName']
+
 # ranger hbase plugin section end
 
 create_hbase_home_directory = check_stack_feature(StackFeature.HBASE_HOME_DIRECTORY, stack_version_formatted)

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index b8785f3..f0566d7 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -547,4 +547,7 @@ if enable_ranger_hdfs:
   if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor.lower() == 'sqla':
     xa_audit_db_is_enabled = False
 
-# ranger hdfs plugin section end
+# need this to capture cluster name from where ranger hdfs plugin is enabled
+cluster_name = config['clusterName']
+
+# ranger hdfs plugin section end
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/ranger-hdfs-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/ranger-hdfs-audit.xml b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/ranger-hdfs-audit.xml
index 731b136..02b195f 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/ranger-hdfs-audit.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/ranger-hdfs-audit.xml
@@ -120,4 +120,13 @@
     </value-attributes>
     <on-ambari-upgrade add="false"/>
   </property>
+  <property>
+    <name>ranger.plugin.hdfs.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger hdfs plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
index 4fa6f0c..58bb65f 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
@@ -543,4 +543,7 @@ if enable_ranger_hdfs:
   if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor.lower() == 'sqla':
     xa_audit_db_is_enabled = False
 
+# need this to capture cluster name from where ranger hdfs plugin is enabled
+cluster_name = config['clusterName']
+
 # ranger hdfs plugin section end

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/configuration/ranger-hive-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/configuration/ranger-hive-audit.xml b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/configuration/ranger-hive-audit.xml
index 95edb38..fc1c5ad 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/configuration/ranger-hive-audit.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/configuration/ranger-hive-audit.xml
@@ -118,4 +118,13 @@
     </value-attributes>
     <on-ambari-upgrade add="false"/>
   </property>
+  <property>
+    <name>ranger.plugin.hive.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger hive plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index 5b9db89..32f18f2 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -296,6 +296,9 @@ if enable_ranger_kafka and is_supported_kafka_ranger:
   if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor.lower() == 'sqla':
     xa_audit_db_is_enabled = False
 
+# need this to capture cluster name from where ranger kafka plugin is enabled
+cluster_name = config['clusterName']
+
 # ranger kafka plugin section end
 
 namenode_hosts = default("/clusterHostInfo/namenode_host", [])

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index 5c07fa4..4558069 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -390,6 +390,9 @@ if enable_ranger_knox:
   if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor == 'sqla':
     xa_audit_db_is_enabled = False
 
+# need this to capture cluster name from where ranger knox plugin is enabled
+cluster_name = config['clusterName']
+
 # ranger knox plugin end section
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] if has_namenode else None

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index f2abe80..9fe0a61 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -293,4 +293,7 @@ ranger_kms_ssl_passwd = config['configurations']['ranger-kms-site']['ranger.serv
 ranger_kms_ssl_enabled = config['configurations']['ranger-kms-site']['ranger.service.https.attrib.ssl.enabled']
 
 xa_audit_hdfs_is_enabled = default("/configurations/ranger-kms-audit/xasecure.audit.destination.hdfs", False)
-namenode_host = default("/clusterHostInfo/namenode_host", [])
\ No newline at end of file
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+
+# need this to capture cluster name from where ranger kms plugin is enabled
+cluster_name = config['clusterName']
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 88be29c..3579fcb 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -500,4 +500,7 @@ if enable_ranger_yarn and is_supported_yarn_ranger:
   if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor == 'sqla':
     xa_audit_db_is_enabled = False
 
+# need this to capture cluster name from where ranger yarn plugin is enabled
+cluster_name = config['clusterName']
+
 # ranger yarn plugin end section

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/ranger-yarn-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/ranger-yarn-audit.xml b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/ranger-yarn-audit.xml
index a6b1baa..7a865b8 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/ranger-yarn-audit.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/ranger-yarn-audit.xml
@@ -174,4 +174,13 @@
     </value-attributes>
     <on-ambari-upgrade add="false"/>
   </property>
+  <property>
+    <name>ranger.plugin.yarn.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger yarn plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
index a8cdda9..66194ed 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
@@ -496,4 +496,7 @@ if enable_ranger_yarn and is_supported_yarn_ranger:
   if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor == 'sqla':
     xa_audit_db_is_enabled = False
 
+# need this to capture cluster name from where ranger yarn plugin is enabled
+cluster_name = config['clusterName']
+
 # ranger yarn plugin end section

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/ranger-atlas-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/ranger-atlas-audit.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/ranger-atlas-audit.xml
new file mode 100644
index 0000000..ecf9f9d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/ranger-atlas-audit.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+
+  <property>
+    <name>ranger.plugin.atlas.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger atlas plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/ranger-hbase-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/ranger-hbase-audit.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/ranger-hbase-audit.xml
new file mode 100644
index 0000000..f7ff00f
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/ranger-hbase-audit.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+
+  <property>
+    <name>ranger.plugin.hbase.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger hbase plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/ranger-hdfs-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/ranger-hdfs-audit.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/ranger-hdfs-audit.xml
new file mode 100644
index 0000000..dbc5b5d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/ranger-hdfs-audit.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+
+  <property>
+    <name>ranger.plugin.hdfs.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger hdfs plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/ranger-hive-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/ranger-hive-audit.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/ranger-hive-audit.xml
new file mode 100644
index 0000000..e1b2bf9
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/ranger-hive-audit.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+
+  <property>
+    <name>ranger.plugin.hive.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger hive plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/services/KAFKA/configuration/ranger-kafka-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/KAFKA/configuration/ranger-kafka-audit.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/KAFKA/configuration/ranger-kafka-audit.xml
new file mode 100644
index 0000000..440b44e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/KAFKA/configuration/ranger-kafka-audit.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+
+  <property>
+    <name>ranger.plugin.kafka.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger kafka plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/services/KNOX/configuration/ranger-knox-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/KNOX/configuration/ranger-knox-audit.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/KNOX/configuration/ranger-knox-audit.xml
new file mode 100644
index 0000000..9dc3ec5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/KNOX/configuration/ranger-knox-audit.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+
+  <property>
+    <name>ranger.plugin.knox.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger knox plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/services/RANGER_KMS/configuration/ranger-kms-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/RANGER_KMS/configuration/ranger-kms-audit.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/RANGER_KMS/configuration/ranger-kms-audit.xml
new file mode 100644
index 0000000..e755770
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/RANGER_KMS/configuration/ranger-kms-audit.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+
+  <property>
+    <name>ranger.plugin.kms.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger kms plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/services/STORM/configuration/ranger-storm-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/STORM/configuration/ranger-storm-audit.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/STORM/configuration/ranger-storm-audit.xml
new file mode 100644
index 0000000..9122d92
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/STORM/configuration/ranger-storm-audit.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+
+  <property>
+    <name>ranger.plugin.storm.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger storm plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/ranger-yarn-audit.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/ranger-yarn-audit.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/ranger-yarn-audit.xml
new file mode 100644
index 0000000..40a2f84
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/ranger-yarn-audit.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+
+  <property>
+    <name>ranger.plugin.yarn.ambari.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>Capture cluster name from where Ranger yarn plugin is enabled.</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
index 0d71244..ceed59b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
@@ -30,5 +30,113 @@
       </component>
     </service>
 
+    <service name="HDFS">
+      <component name="NAMENODE">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_maint_ranger_hdfs_plugin_cluster_name">
+            <type>ranger-hdfs-audit</type>
+            <set key="ranger.plugin.hdfs.ambari.cluster.name" value="{{cluster_name}}"
+              if-type="ranger-hdfs-plugin-properties" if-key="ranger-hdfs-plugin-enabled" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="HIVE">
+      <component name="HIVE_SERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_maint_ranger_hive_plugin_cluster_name">
+            <type>ranger-hive-audit</type>
+            <set key="ranger.plugin.hive.ambari.cluster.name" value="{{cluster_name}}"
+              if-type="ranger-hive-audit" if-key="xasecure.audit.destination.solr" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="HBASE">
+      <component name="HBASE_MASTER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_maint_ranger_hbase_plugin_cluster_name">
+            <type>ranger-hbase-audit</type>
+            <set key="ranger.plugin.hbase.ambari.cluster.name" value="{{cluster_name}}"
+              if-type="ranger-hbase-plugin-properties" if-key="ranger-hbase-plugin-enabled" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="KNOX">
+      <component name="KNOX_GATEWAY">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_maint_ranger_knox_plugin_cluster_name">
+            <type>ranger-knox-audit</type>
+            <set key="ranger.plugin.knox.ambari.cluster.name" value="{{cluster_name}}"
+              if-type="ranger-knox-plugin-properties" if-key="ranger-knox-plugin-enabled" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="STORM">
+      <component name="NIMBUS">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_maint_ranger_storm_plugin_cluster_name">
+            <type>ranger-storm-audit</type>
+            <set key="ranger.plugin.storm.ambari.cluster.name" value="{{cluster_name}}"
+              if-type="ranger-storm-plugin-properties" if-key="ranger-storm-plugin-enabled" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="YARN">
+      <component name="RESOURCEMANAGER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_maint_ranger_yarn_plugin_cluster_name">
+            <type>ranger-yarn-audit</type>
+            <set key="ranger.plugin.yarn.ambari.cluster.name" value="{{cluster_name}}"
+              if-type="ranger-yarn-plugin-properties" if-key="ranger-yarn-plugin-enabled" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="KAFKA">
+      <component name="KAFKA_BROKER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_maint_ranger_kafka_plugin_cluster_name">
+            <type>ranger-kafka-audit</type>
+            <set key="ranger.plugin.kafka.ambari.cluster.name" value="{{cluster_name}}"
+              if-type="ranger-kafka-plugin-properties" if-key="ranger-kafka-plugin-enabled" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="ATLAS">
+      <component name="ATLAS_SERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_maint_ranger_atlas_plugin_cluster_name">
+            <type>ranger-atlas-audit</type>
+            <set key="ranger.plugin.atlas.ambari.cluster.name" value="{{cluster_name}}"
+              if-type="ranger-atlas-plugin-properties" if-key="ranger-atlas-plugin-enabled" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="RANGER_KMS">
+      <component name="RANGER_KMS_SERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_maint_ranger_kms_plugin_cluster_name">
+            <type>ranger-kms-audit</type>
+            <set key="ranger.plugin.kms.ambari.cluster.name" value="{{cluster_name}}"
+              if-type="ranger-kms-audit" if-key="ranger.plugin.kms.ambari.cluster.name" if-key-state="absent"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
   </services>
 </upgrade-config-changes>

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
index 68c58c0..ce07f7a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
@@ -284,13 +284,58 @@
       </execute-stage>
     </group>
 
-    <!-- ToDo: on config updates, this section need to be uncommented and extended with the changes
+    
     <group xsi:type="cluster" name="Upgrade service configs" title="Upgrade service configs">
-      <direction>UPGRADE</direction>
-      <skippable>true</skippable>
+      <direction>UPGRADE</direction> <!--  prevent config changes on downgrade -->
+      <skippable>true</skippable> <!-- May fix configuration problems manually -->
+
+      <!-- HDFS -->
+      <execute-stage service="HDFS" component="NAMENODE" title="Apply config changes for Ranger Hdfs plugin">
+        <task xsi:type="configure" id="hdp_2_6_maint_ranger_hdfs_plugin_cluster_name"/>
+      </execute-stage>
+
+      <!-- HIVE -->
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Apply config changes for Ranger Hive plugin">
+        <task xsi:type="configure" id="hdp_2_6_maint_ranger_hive_plugin_cluster_name"/>
+      </execute-stage>
+
+      <!-- HBASE -->
+      <execute-stage service="HBASE" component="HBASE_MASTER" title="Apply config changes for Ranger Hbase plugin">
+        <task xsi:type="configure" id="hdp_2_6_maint_ranger_hbase_plugin_cluster_name"/>
+      </execute-stage>
+
+      <!-- KNOX -->
+      <execute-stage service="KNOX" component="KNOX_GATEWAY" title="Apply config changes for Ranger Knox plugin">
+        <task xsi:type="configure" id="hdp_2_6_maint_ranger_knox_plugin_cluster_name"/>
+      </execute-stage>
+
+      <!-- STORM -->
+      <execute-stage service="STORM" component="NIMBUS" title="Apply config changes for Ranger Storm plugin">
+        <task xsi:type="configure" id="hdp_2_6_maint_ranger_storm_plugin_cluster_name"/>
+      </execute-stage>
+
+      <!-- YARN -->
+      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Ranger Yarn plugin">
+        <task xsi:type="configure" id="hdp_2_6_maint_ranger_yarn_plugin_cluster_name"/>
+      </execute-stage>
+
+      <!-- KAFKA -->
+      <execute-stage service="KAFKA" component="KAFKA_BROKER" title="Apply config changes for Ranger Kafka plugin">
+        <task xsi:type="configure" id="hdp_2_6_maint_ranger_kafka_plugin_cluster_name"/>
+      </execute-stage>
+
+      <!-- ATLAS -->
+      <execute-stage service="ATLAS" component="ATLAS_SERVER" title="Apply config changes for Ranger Atlas plugin">
+        <task xsi:type="configure" id="hdp_2_6_maint_ranger_atlas_plugin_cluster_name"/>
+      </execute-stage>
+
+      <!-- KMS -->
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Apply config changes for Ranger Kms plugin">
+        <task xsi:type="configure" id="hdp_2_6_maint_ranger_kms_plugin_cluster_name"/>
+      </execute-stage>
 
     </group>
-    -->
+    
 
     <!--
     After processing this group, the user-specified Kerberos descriptor will be updated to work with

http://git-wip-us.apache.org/repos/asf/ambari/blob/112f7d4e/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
index 01f11e4..fd72e4d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
@@ -569,6 +569,7 @@
     <service name="RANGER_KMS">
       <component name="RANGER_KMS_SERVER">
         <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_maint_ranger_kms_plugin_cluster_name"/>
           <task xsi:type="execute" hosts="any" sequential="true">
             <summary>Upgrading Ranger KMS database schema</summary>
             <script>scripts/kms_server.py</script>
@@ -592,6 +593,10 @@
 
     <service name="KAFKA">
       <component name="KAFKA_BROKER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_maint_ranger_kafka_plugin_cluster_name"/>
+        </pre-upgrade>
+        <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -600,6 +605,10 @@
 
     <service name="HDFS">
       <component name="NAMENODE">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_maint_ranger_hdfs_plugin_cluster_name"/>
+        </pre-upgrade>
+        <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -674,6 +683,10 @@
       </component>
 
       <component name="RESOURCEMANAGER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_maint_ranger_yarn_plugin_cluster_name"/>
+        </pre-upgrade>
+        <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -694,6 +707,10 @@
 
     <service name="HBASE">
       <component name="HBASE_MASTER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_maint_ranger_hbase_plugin_cluster_name"/>
+        </pre-upgrade>
+        <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -766,6 +783,10 @@
       </component>
 
       <component name="HIVE_SERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_maint_ranger_hive_plugin_cluster_name"/>
+        </pre-upgrade>
+        <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -914,6 +935,10 @@
 
     <service name="KNOX">
       <component name="KNOX_GATEWAY">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_maint_ranger_knox_plugin_cluster_name"/>
+        </pre-upgrade>
+        <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -922,6 +947,10 @@
 
     <service name="STORM">
       <component name="NIMBUS">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_maint_ranger_storm_plugin_cluster_name"/>
+        </pre-upgrade>
+        <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -986,6 +1015,10 @@
 
     <service name="ATLAS">
       <component name="ATLAS_SERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_6_maint_ranger_atlas_plugin_cluster_name"/>
+        </pre-upgrade>
+        <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
         <upgrade>
           <task xsi:type="restart-task"/>
         </upgrade>


[05/41] ambari git commit: AMBARI-20665 Merge duplicated logic of Persist. (atkach)

Posted by ao...@apache.org.
AMBARI-20665 Merge duplicated logic of Persist. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1352fa90
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1352fa90
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1352fa90

Branch: refs/heads/branch-3.0-perf
Commit: 1352fa908c5b07d5a8e44fa0b67da2beb382ba90
Parents: 88b82fe
Author: Andrii Tkach <at...@apache.org>
Authored: Mon Apr 3 19:32:31 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   1 +
 ambari-web/app/controllers/application.js       |   2 +-
 ambari-web/app/controllers/experimental.js      |   2 +-
 .../global/errors_handler_controller.js         |   2 +-
 .../global/user_settings_controller.js          |   2 +-
 .../global/wizard_watcher_controller.js         |   2 +-
 ambari-web/app/controllers/installer.js         |   2 +-
 ambari-web/app/controllers/wizard.js            |   7 +-
 ambari-web/app/mixins.js                        |   3 +-
 ambari-web/app/mixins/common/persist.js         | 166 +++++++++++++++++++
 ambari-web/app/mixins/common/persist_mixin.js   |  45 -----
 ambari-web/app/mixins/common/userPref.js        | 126 --------------
 ambari-web/app/models/cluster_states.js         |   3 +-
 ambari-web/app/utils/ajax/ajax.js               |  24 +--
 ambari-web/app/utils/persist.js                 | 101 -----------
 .../configs/service_configs_by_category_view.js |   2 +-
 ambari-web/app/views/common/table_view.js       |   2 +-
 ambari-web/app/views/main/dashboard/widgets.js  |   2 +-
 .../app/views/main/service/info/summary.js      |   2 +-
 ambari-web/test/controllers/installer_test.js   |   4 +-
 .../test/controllers/main/service/item_test.js  |   2 +-
 ambari-web/test/controllers/wizard_test.js      |   4 +-
 ambari-web/test/mixins/common/persist_test.js   | 125 ++++++++++++++
 ambari-web/test/views/common/table_view_test.js |  14 +-
 .../test/views/main/dashboard/widgets_test.js   |   2 +-
 25 files changed, 328 insertions(+), 319 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 8859a29..ef8d0bc 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -186,6 +186,7 @@ var files = [
   'test/mixins/common/serverValidator_test',
   'test/mixins/common/table_server_view_mixin_test',
   'test/mixins/common/widget_mixin_test',
+  'test/mixins/common/persist_test',
   'test/mixins/main/host/details/host_components/decommissionable_test',
   'test/mixins/main/host/details/host_components/install_component_test',
   'test/mixins/main/service/configs/widget_popover_support_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/controllers/application.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/application.js b/ambari-web/app/controllers/application.js
index df0a8e8..580f337 100644
--- a/ambari-web/app/controllers/application.js
+++ b/ambari-web/app/controllers/application.js
@@ -19,7 +19,7 @@
 
 var App = require('app');
 
-App.ApplicationController = Em.Controller.extend(App.UserPref, {
+App.ApplicationController = Em.Controller.extend(App.Persist, {
 
   name: 'applicationController',
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/controllers/experimental.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/experimental.js b/ambari-web/app/controllers/experimental.js
index bf5c529..f4e22ea 100644
--- a/ambari-web/app/controllers/experimental.js
+++ b/ambari-web/app/controllers/experimental.js
@@ -17,7 +17,7 @@
 
 var App = require('app');
 
-App.ExperimentalController = Em.Controller.extend(App.UserPref, {
+App.ExperimentalController = Em.Controller.extend(App.Persist, {
   name: 'experimentalController',
   supports: function () {
     return Em.keys(App.get('supports')).map(function (sup) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/controllers/global/errors_handler_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/global/errors_handler_controller.js b/ambari-web/app/controllers/global/errors_handler_controller.js
index c5b1067..966fcab 100644
--- a/ambari-web/app/controllers/global/errors_handler_controller.js
+++ b/ambari-web/app/controllers/global/errors_handler_controller.js
@@ -18,7 +18,7 @@
 
 var App = require('app');
 
-App.ErrorsHandlerController = Em.Controller.extend(App.UserPref, {
+App.ErrorsHandlerController = Em.Controller.extend(App.Persist, {
 
   name: 'errorsHandlerController',
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/controllers/global/user_settings_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/global/user_settings_controller.js b/ambari-web/app/controllers/global/user_settings_controller.js
index e971cba..68d7e65 100644
--- a/ambari-web/app/controllers/global/user_settings_controller.js
+++ b/ambari-web/app/controllers/global/user_settings_controller.js
@@ -26,7 +26,7 @@ var timezoneUtils = require('utils/date/timezone');
  *
  * @class UserSettingsController
  */
-App.UserSettingsController = Em.Controller.extend(App.UserPref, {
+App.UserSettingsController = Em.Controller.extend(App.Persist, {
 
   name: 'userSettingsController',
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/controllers/global/wizard_watcher_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/global/wizard_watcher_controller.js b/ambari-web/app/controllers/global/wizard_watcher_controller.js
index e2770fa..1562d7c 100644
--- a/ambari-web/app/controllers/global/wizard_watcher_controller.js
+++ b/ambari-web/app/controllers/global/wizard_watcher_controller.js
@@ -18,7 +18,7 @@
 
 var App = require('app');
 
-App.WizardWatcherController = Em.Controller.extend(App.UserPref, {
+App.WizardWatcherController = Em.Controller.extend(App.Persist, {
   name: 'wizardWatcherController',
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/controllers/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/installer.js b/ambari-web/app/controllers/installer.js
index 553ec43..0946ed8 100644
--- a/ambari-web/app/controllers/installer.js
+++ b/ambari-web/app/controllers/installer.js
@@ -21,7 +21,7 @@ var App = require('app');
 var stringUtils = require('utils/string_utils');
 var validator = require('utils/validator');
 
-App.InstallerController = App.WizardController.extend(App.UserPref, {
+App.InstallerController = App.WizardController.extend(App.Persist, {
 
   name: 'installerController',
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/controllers/wizard.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard.js b/ambari-web/app/controllers/wizard.js
index d9d05bc..49bdac6 100644
--- a/ambari-web/app/controllers/wizard.js
+++ b/ambari-web/app/controllers/wizard.js
@@ -18,7 +18,6 @@
 
 
 var App = require('app');
-var persistUtils = require('utils/persist');
 
 require('models/host');
 
@@ -915,7 +914,7 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM
   loadServiceConfigProperties: function () {
     var dfd = $.Deferred();
     var self = this;
-    this.getPersistentProperty('serviceConfigProperties').always(function(data) {
+    this.getDecompressedData('serviceConfigProperties').always(function(data) {
       if (data && !data.error) {
         self.set('content.serviceConfigProperties', data);
       }
@@ -971,7 +970,7 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM
     }, this);
     this.set('content.serviceConfigProperties', serviceConfigProperties);
     this.setDBProperty('fileNamesToUpdate', fileNamesToUpdate);
-    return this.setPersistentProperty('serviceConfigProperties', serviceConfigProperties);
+    return this.postCompressedData('serviceConfigProperties', serviceConfigProperties);
   },
 
   isExcludedConfig: function (configProperty) {
@@ -1436,7 +1435,7 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM
 
   clearServiceConfigProperties: function() {
     this.get('content.serviceConfigProperties', null);
-    return this.removePersistentProperty('serviceConfigProperties');
+    return this.postCompressedData('serviceConfigProperties', '');
   },
 
   saveTasksStatuses: function (tasksStatuses) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/mixins.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins.js b/ambari-web/app/mixins.js
index 4fdfa54..06c69f7 100644
--- a/ambari-web/app/mixins.js
+++ b/ambari-web/app/mixins.js
@@ -23,8 +23,7 @@ require('mixins/common/blueprint');
 require('mixins/common/kdc_credentials_controller_mixin');
 require('mixins/common/localStorage');
 require('mixins/common/infinite_scroll_mixin');
-require('mixins/common/persist_mixin');
-require('mixins/common/userPref');
+require('mixins/common/persist');
 require('mixins/common/reload_popup');
 require('mixins/common/serverValidator');
 require('mixins/common/table_server_view_mixin');

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/mixins/common/persist.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/persist.js b/ambari-web/app/mixins/common/persist.js
new file mode 100644
index 0000000..4475112
--- /dev/null
+++ b/ambari-web/app/mixins/common/persist.js
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+var LZString = require('utils/lz-string');
+
+/**
+ * Small mixin for processing user preferences
+ * Provide methods to save/load some values in <code>persist</code> storage
+ * Save available only for admin users!
+ * When using this mixin you should redeclare methods:
+ * <ul>
+ *   <li>getUserPrefSuccessCallback</li>
+ *   <li>getUserPrefErrorCallback</li>
+ *   <li>postUserPrefSuccessCallback</li>
+ *   <li>postUserPrefErrorCallback</li>
+ * </ul>
+ * @type {Em.Mixin}
+ */
+App.Persist = Em.Mixin.create({
+
+  /**
+   * Additional to request data
+   * @type {object}
+   */
+  additionalData: {},
+
+  /**
+   * Get persist value from server with persistKey
+   * @param {String} key
+   */
+  getUserPref: function(key) {
+    return App.ajax.send({
+      name: 'persist.get',
+      sender: this,
+      data: {
+        key: key,
+        data: this.get('additionalData')
+      },
+      success: 'getUserPrefSuccessCallback',
+      error: 'getUserPrefErrorCallback'
+    });
+  },
+
+  /**
+   *
+   * @param {string} key
+   * @returns {$.Deferred}
+   */
+  getDecompressedData: function(key) {
+    var dfd = $.Deferred();
+    App.ajax.send({
+      name: 'persist.get.text',
+      sender: this,
+      data: {
+        key: key
+      }
+    }).always(function(data, textStatus, error) {
+      if (data && typeof data === 'string') {
+        dfd.resolve(JSON.parse(LZString.decompressFromBase64(data)));
+      } else {
+        dfd.reject({error: error});
+      }
+    });
+    return dfd.promise();
+  },
+
+  /**
+   * Should be redefined in objects that use this mixin
+   * @param {*} response
+   * @param {Object} request
+   * @param {Object} data
+   * @returns {*}
+   */
+  getUserPrefSuccessCallback: function (response, request, data) {},
+
+  /**
+   * Should be redefined in objects that use this mixin
+   * @param {Object} request
+   * @param {Object} ajaxOptions
+   * @param {String} error
+   */
+  getUserPrefErrorCallback: function (request, ajaxOptions, error) {},
+
+  /**
+   * Post persist key/value to server, value is object
+   * Only for admin users!
+   * @param {String} key
+   * @param {Object} value
+   */
+  postUserPref: function (key, value) {
+    if (!App.isAuthorized('CLUSTER.MANAGE_USER_PERSISTED_DATA')) {
+      return $.Deferred().reject().promise();
+    }
+    var keyValuePair = {};
+    keyValuePair[key] = JSON.stringify(value);
+    return this.post(keyValuePair);
+  },
+
+  /**
+   *
+   * @param {string} key
+   * @param {Object} value
+   * @returns {$.ajax}
+   */
+  postCompressedData: function (key, value) {
+    var keyValuePair = {};
+    keyValuePair[key] = !Em.isEmpty(value) ? LZString.compressToBase64(JSON.stringify(value)) : '';
+    return this.post(keyValuePair);
+  },
+
+  post: function(keyValuePair) {
+    return App.ajax.send({
+      'name': 'persist.post',
+      'sender': this,
+      'beforeSend': 'postUserPrefBeforeSend',
+      'data': {
+        'keyValuePair': keyValuePair
+      },
+      'success': 'postUserPrefSuccessCallback',
+      'error': 'postUserPrefErrorCallback'
+    });
+  },
+
+  /**
+   * Should be redefined in objects that use this mixin
+   * @param {*} response
+   * @param {Object} request
+   * @param {Object} data
+   * @returns {*}
+   */
+  postUserPrefSuccessCallback: function (response, request, data) {},
+
+  /**
+   * Should be redefined in objects that use this mixin
+   * @param {Object} request
+   * @param {Object} ajaxOptions
+   * @param {String} error
+   */
+  postUserPrefErrorCallback: function(request, ajaxOptions, error) {},
+
+  /**
+   * Little log before post request
+   * @param {Object} request
+   * @param {Object} ajaxOptions
+   * @param {Object} data
+   */
+  postUserPrefBeforeSend: function(request, ajaxOptions, data){
+  }
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/mixins/common/persist_mixin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/persist_mixin.js b/ambari-web/app/mixins/common/persist_mixin.js
deleted file mode 100644
index 0b81364..0000000
--- a/ambari-web/app/mixins/common/persist_mixin.js
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var App = require('app');
-var persistUtils = require('utils/persist');
-
-App.Persist = Em.Mixin.create({
-
-  persistNamespace: function() {
-    var name = this.get('name');
-    if (Em.isNone(name)) {
-      name = this.get('controller.name');
-    }
-    return name.capitalize().replace('Controller', '');
-  }.property('name'),
-
-  getPersistentProperty: function(key) {
-    return persistUtils.get(this.get('persistNamespace') + '__' + key);
-  },
-
-  setPersistentProperty: function(key, value) {
-    return persistUtils.put(this.get('persistNamespace') + '__' + key, value);
-  },
-
-  removePersistentProperty: function(key) {
-    return persistUtils.remove(this.get('persistNamespace') + '__' + key);
-  }
-
-
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/mixins/common/userPref.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/userPref.js b/ambari-web/app/mixins/common/userPref.js
deleted file mode 100644
index 54afe4a..0000000
--- a/ambari-web/app/mixins/common/userPref.js
+++ /dev/null
@@ -1,126 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var App = require('app');
-
-/**
- * Small mixin for processing user preferences
- * Provide methods to save/load some values in <code>persist</code> storage
- * Save available only for admin users!
- * When using this mixin you should redeclare methods:
- * <ul>
- *   <li>getUserPrefSuccessCallback</li>
- *   <li>getUserPrefErrorCallback</li>
- *   <li>postUserPrefSuccessCallback</li>
- *   <li>postUserPrefErrorCallback</li>
- * </ul>
- * @type {Em.Mixin}
- */
-App.UserPref = Em.Mixin.create({
-
-  /**
-   * Additional to request data
-   * @type {object}
-   */
-  additionalData: {},
-
-  /**
-   * Get persist value from server with persistKey
-   * @param {String} key
-   */
-  getUserPref: function(key) {
-    return App.ajax.send({
-      name: 'settings.get.user_pref',
-      sender: this,
-      data: {
-        key: key,
-        data: this.get('additionalData')
-      },
-      success: 'getUserPrefSuccessCallback',
-      error: 'getUserPrefErrorCallback'
-    });
-  },
-
-  /**
-   * Should be redeclared in objects that use this mixin
-   * @param {*} response
-   * @param {Object} request
-   * @param {Object} data
-   * @returns {*}
-   */
-  getUserPrefSuccessCallback: function (response, request, data) {},
-
-  /**
-   * Should be redeclared in objects that use this mixin
-   * @param {Object} request
-   * @param {Object} ajaxOptions
-   * @param {String} error
-   */
-  getUserPrefErrorCallback: function (request, ajaxOptions, error) {},
-
-  /**
-   * Post persist key/value to server, value is object
-   * Only for admin users!
-   * @param {String} key
-   * @param {Object} value
-   */
-  postUserPref: function (key, value) {
-    if (!App.isAuthorized('CLUSTER.MANAGE_USER_PERSISTED_DATA')) {
-      return $.Deferred().reject().promise();
-    }
-    var keyValuePair = {};
-    keyValuePair[key] = JSON.stringify(value);
-    return App.ajax.send({
-      'name': 'settings.post.user_pref',
-      'sender': this,
-      'beforeSend': 'postUserPrefBeforeSend',
-      'data': {
-        'keyValuePair': keyValuePair
-      },
-      'success': 'postUserPrefSuccessCallback',
-      'error': 'postUserPrefErrorCallback'
-    });
-  },
-
-  /**
-   * Should be redeclared in objects that use this mixin
-   * @param {*} response
-   * @param {Object} request
-   * @param {Object} data
-   * @returns {*}
-   */
-  postUserPrefSuccessCallback: function (response, request, data) {},
-
-  /**
-   * Should be redeclared in objects that use this mixin
-   * @param {Object} request
-   * @param {Object} ajaxOptions
-   * @param {String} error
-   */
-  postUserPrefErrorCallback: function(request, ajaxOptions, error) {},
-
-  /**
-   * Little log before post request
-   * @param {Object} request
-   * @param {Object} ajaxOptions
-   * @param {Object} data
-   */
-  postUserPrefBeforeSend: function(request, ajaxOptions, data){
-  }
-
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/models/cluster_states.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/cluster_states.js b/ambari-web/app/models/cluster_states.js
index cbc134d..f111c3a 100644
--- a/ambari-web/app/models/cluster_states.js
+++ b/ambari-web/app/models/cluster_states.js
@@ -16,9 +16,8 @@
  * limitations under the License.
  */
 var App = require('app');
-require('mixins/common/userPref');
 var LZString = require('utils/lz-string');
-App.clusterStatus = Em.Object.create(App.UserPref, {
+App.clusterStatus = Em.Object.create(App.Persist, {
 
   /**
    * Cluster name

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index e8be31c..2d157fe 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -1350,20 +1350,6 @@ var urls = {
       };
     }
   },
-  'settings.get.user_pref': {
-    'real': '/persist/{key}',
-    'mock': '/data/user_settings/{key}.json'
-  },
-  'settings.post.user_pref': {
-    'real': '/persist',
-    'mock': '',
-    'type': 'POST',
-    'format': function (data) {
-      return {
-        data: JSON.stringify(data.keyValuePair)
-      }
-    }
-  },
   'cluster.load_cluster_name': {
     'real': '/clusters?fields=Clusters/security_type',
     'mock': '/data/clusters/info.json'
@@ -2200,7 +2186,7 @@ var urls = {
     'mock': '/data/requests/host_check/1.json'
   },
 
-  'persist.get': {
+  'persist.get.text': {
     'real': '/persist/{key}',
     'mock': '',
     'type': 'GET',
@@ -2210,7 +2196,13 @@ var urls = {
       }
     }
   },
-  'persist.put': {
+
+  'persist.get': {
+    'real': '/persist/{key}',
+    'mock': '',
+    'type': 'GET'
+  },
+  'persist.post': {
     'real': '/persist',
     'mock': '',
     'type': 'POST',

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/utils/persist.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/persist.js b/ambari-web/app/utils/persist.js
deleted file mode 100644
index 3d3164f..0000000
--- a/ambari-web/app/utils/persist.js
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var LZString = require('utils/lz-string');
-/**
- * Persist storage managing utils. It helps to put and get values from persisted storage
- * by api (/api/v1/persist).
- * @type {Object}
- */
-module.exports = {
-  /**
-   * Get item from persist storage by key and optional path.
-   *
-   * @param  {String} key persist key to get e.g 'CLUSTER_STATUS', will fetch data from
-   * /api/v1/persist/CLUSTER_STATUS
-   * @param  {String} [path=null] Em.get compatible attributes path
-   * @return {$.Deferred}
-   */
-  get: function(key, path) {
-    var dfd = $.Deferred();
-    App.ajax.send({
-      name: 'persist.get',
-      sender: this,
-      data: {
-        deferred: dfd,
-        key: key || '',
-        path: path
-      },
-      success: 'getSuccessCallback',
-      error: 'getErrorCallback'
-    });
-    return dfd.promise();
-  },
-
-  getSuccessCallback: function(data, xhr, params) {
-    var extracted, response = data;
-    try {
-      response = JSON.parse(response);
-    } catch(e) { }
-    if (Em.isEmpty(data)) {
-      params.deferred.resolve(null);
-      return;
-    }
-    if (typeof response === 'string') {
-      extracted = JSON.parse(LZString.decompressFromBase64(response));
-      params.deferred.resolve(params.path ? Em.get(extracted, params.path) : extracted);
-    } else {
-      params.deferred.resolve(response);
-    }
-  },
-
-  getErrorCallback: function(request, ajaxOptions, error, opt, params) {
-    params.deferred.reject({
-      request: request,
-      error: error
-    });
-  },
-
-  /**
-   * Update key value.
-   *
-   * @param  {String} key
-   * @param  {Object} value value to save
-   * @return {$.Deferred}
-   */
-  put: function(key, value) {
-    var kv = {};
-    kv[key] = !Em.isEmpty(value) ? LZString.compressToBase64(JSON.stringify(value)) : '';
-    return App.ajax.send({
-      name: 'persist.put',
-      sender: this,
-      data: {
-        keyValuePair: kv
-      },
-      success: 'putSuccessCallback',
-      error: 'putErrorCallback'
-    });
-  },
-
-  putSuccessCallback: function() {},
-  putErrorCallback: function() {},
-
-  remove: function(key) {
-    return this.put(key, '');
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/views/common/configs/service_configs_by_category_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/service_configs_by_category_view.js b/ambari-web/app/views/common/configs/service_configs_by_category_view.js
index 16c828c..4058020 100644
--- a/ambari-web/app/views/common/configs/service_configs_by_category_view.js
+++ b/ambari-web/app/views/common/configs/service_configs_by_category_view.js
@@ -21,7 +21,7 @@ var App = require('app');
 var validator = require('utils/validator');
 require('utils/configs/modification_handlers/modification_handler');
 
-App.ServiceConfigsByCategoryView = Em.View.extend(App.UserPref, App.ConfigOverridable, {
+App.ServiceConfigsByCategoryView = Em.View.extend(App.Persist, App.ConfigOverridable, {
 
   templateName: require('templates/common/configs/service_config_category'),
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/views/common/table_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/table_view.js b/ambari-web/app/views/common/table_view.js
index c394d47..e14d283 100644
--- a/ambari-web/app/views/common/table_view.js
+++ b/ambari-web/app/views/common/table_view.js
@@ -19,7 +19,7 @@
 var App = require('app');
 var filters = require('views/common/filter_view');
 
-App.TableView = Em.View.extend(App.UserPref, {
+App.TableView = Em.View.extend(App.Persist, {
 
   init: function() {
     this.set('filterConditions', []);

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/views/main/dashboard/widgets.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets.js b/ambari-web/app/views/main/dashboard/widgets.js
index 16840a5..2850200 100644
--- a/ambari-web/app/views/main/dashboard/widgets.js
+++ b/ambari-web/app/views/main/dashboard/widgets.js
@@ -28,7 +28,7 @@ const WidgetObject = Em.Object.extend({
   isVisible: true
 });
 
-App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, App.TimeRangeMixin, {
+App.MainDashboardWidgetsView = Em.View.extend(App.Persist, App.LocalStorage, App.TimeRangeMixin, {
   name: 'mainDashboardWidgetsView',
   templateName: require('templates/main/dashboard/widgets'),
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/app/views/main/service/info/summary.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/summary.js b/ambari-web/app/views/main/service/info/summary.js
index 378a8b3..551a3fd 100644
--- a/ambari-web/app/views/main/service/info/summary.js
+++ b/ambari-web/app/views/main/service/info/summary.js
@@ -21,7 +21,7 @@ var misc = require('utils/misc');
 require('views/main/service/service');
 require('data/service_graph_config');
 
-App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin, {
+App.MainServiceInfoSummaryView = Em.View.extend(App.Persist, App.TimeRangeMixin, {
   templateName: require('templates/main/service/info/summary'),
   /**
    * @property {Number} chunkSize - number of columns in Metrics section

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/test/controllers/installer_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/installer_test.js b/ambari-web/test/controllers/installer_test.js
index 74556e8..d936ffc 100644
--- a/ambari-web/test/controllers/installer_test.js
+++ b/ambari-web/test/controllers/installer_test.js
@@ -787,12 +787,12 @@ describe('App.InstallerController', function () {
 
   describe('#loadServiceConfigProperties', function() {
     beforeEach(function () {
-      sinon.stub(installerController, 'getPersistentProperty').returns($.Deferred().resolve({
+      sinon.stub(installerController, 'getDecompressedData').returns($.Deferred().resolve({
         value: 2
       }).promise());
     });
     afterEach(function () {
-      installerController.getPersistentProperty.restore();
+      installerController.getDecompressedData.restore();
     });
     it ('Should load service config property', function() {
       installerController.loadServiceConfigProperties();

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/test/controllers/main/service/item_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/item_test.js b/ambari-web/test/controllers/main/service/item_test.js
index 69a02de..7f50620 100644
--- a/ambari-web/test/controllers/main/service/item_test.js
+++ b/ambari-web/test/controllers/main/service/item_test.js
@@ -20,7 +20,7 @@ App = require('app');
 require('ember');
 require('models/host_component');
 require('views/common/modal_popup');
-require('mixins/common/userPref');
+require('mixins/common/persist');
 require('controllers/application');
 require('controllers/global/background_operations_controller');
 require('controllers/global/cluster_controller');

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/test/controllers/wizard_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/wizard_test.js b/ambari-web/test/controllers/wizard_test.js
index 922cc90..1a00f24 100644
--- a/ambari-web/test/controllers/wizard_test.js
+++ b/ambari-web/test/controllers/wizard_test.js
@@ -1055,7 +1055,7 @@ describe('App.WizardController', function () {
       sinon.stub(c, 'setDBProperty', Em.K);
       sinon.stub(c, 'setDBProperties', Em.K);
       sinon.stub(c, 'getDBProperty').withArgs('fileNamesToUpdate').returns([]);
-      sinon.stub(c, 'setPersistentProperty', Em.K);
+      sinon.stub(c, 'postCompressedData', Em.K);
       sinon.stub(App.config, 'shouldSupportFinal').returns(true);
     });
 
@@ -1063,7 +1063,7 @@ describe('App.WizardController', function () {
       c.setDBProperty.restore();
       c.setDBProperties.restore();
       c.getDBProperty.restore();
-      c.setPersistentProperty.restore();
+      c.postCompressedData.restore();
       App.config.shouldSupportFinal.restore();
     });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/test/mixins/common/persist_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/common/persist_test.js b/ambari-web/test/mixins/common/persist_test.js
new file mode 100644
index 0000000..592d9b7
--- /dev/null
+++ b/ambari-web/test/mixins/common/persist_test.js
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+var testHelpers = require('test/helpers');
+var LZString = require('utils/lz-string');
+
+describe('App.Persist', function () {
+  var mixin;
+
+  beforeEach(function () {
+    mixin = Em.Object.create(App.Persist, {
+      additionalData: {}
+    });
+  });
+
+  describe('#getUserPref', function() {
+
+    it('App.ajax.send should be called', function() {
+      mixin.getUserPref('foo');
+      var args = testHelpers.findAjaxRequest('name', 'persist.get');
+      expect(args[0]).to.be.eql({
+        name: 'persist.get',
+        sender: mixin,
+        data: {
+          key: 'foo',
+          data: {}
+        },
+        success: 'getUserPrefSuccessCallback',
+        error: 'getUserPrefErrorCallback'
+      });
+    });
+  });
+
+  describe('#getDecompressedData', function() {
+
+    it('App.ajax.send should be called', function() {
+      mixin.getDecompressedData('foo');
+      var args = testHelpers.findAjaxRequest('name', 'persist.get.text');
+      expect(args[0]).to.be.eql({
+        name: 'persist.get.text',
+        sender: mixin,
+        data: {
+          key: 'foo'
+        }
+      });
+    });
+  });
+
+  describe('#post', function() {
+
+    it('App.ajax.send should be called', function() {
+      mixin.post({"foo": "bar"});
+      var args = testHelpers.findAjaxRequest('name', 'persist.post');
+      expect(args[0]).to.be.eql({
+        'name': 'persist.post',
+        'sender': mixin,
+        'beforeSend': 'postUserPrefBeforeSend',
+        'data': {
+          'keyValuePair': {"foo": "bar"}
+        },
+        'success': 'postUserPrefSuccessCallback',
+        'error': 'postUserPrefErrorCallback'
+      });
+    });
+  });
+
+  describe('#postUserPref', function() {
+    beforeEach(function() {
+      sinon.stub(mixin, 'post');
+      this.mockAuthorize = sinon.stub(App, 'isAuthorized');
+    });
+    afterEach(function() {
+      mixin.post.restore();
+      this.mockAuthorize.restore();
+    });
+
+    it('post should be called when authorized', function() {
+      this.mockAuthorize.withArgs('CLUSTER.MANAGE_USER_PERSISTED_DATA').returns(true);
+      mixin.postUserPref('foo', {"foo": "bar"});
+      expect(mixin.post.calledWith({'foo': '{"foo":"bar"}'})).to.be.true;
+    });
+
+    it('post should not be called when not authorized', function() {
+      this.mockAuthorize.withArgs('CLUSTER.MANAGE_USER_PERSISTED_DATA').returns(false);
+      mixin.postUserPref('foo', {"foo": "bar"});
+      expect(mixin.post.called).to.be.false;
+    });
+  });
+
+  describe('#postCompressedData', function() {
+    beforeEach(function() {
+      sinon.stub(mixin, 'post');
+      sinon.stub(LZString, 'compressToBase64', function(args) {return args;})
+    });
+    afterEach(function() {
+      mixin.post.restore();
+      LZString.compressToBase64.restore();
+    });
+
+    it('post should be called with object value', function() {
+      mixin.postCompressedData('foo', {"foo": "bar"});
+      expect(mixin.post.calledWith({'foo': '{"foo":"bar"}'})).to.be.true;
+    });
+    it('post should be called with empty value', function() {
+      mixin.postCompressedData('foo', null);
+      expect(mixin.post.calledWith({'foo': ''})).to.be.true;
+    });
+  });
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/test/views/common/table_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/table_view_test.js b/ambari-web/test/views/common/table_view_test.js
index 3d11e30..439fac4 100644
--- a/ambari-web/test/views/common/table_view_test.js
+++ b/ambari-web/test/views/common/table_view_test.js
@@ -21,7 +21,7 @@ require('utils/db');
 require('views/common/filter_view');
 require('views/common/sort_view');
 require('mixins');
-require('mixins/common/userPref');
+require('mixins/common/persist');
 require('views/common/table_view');
 
 function getView() {
@@ -60,7 +60,7 @@ describe('App.TableView', function () {
   describe('#updatePaging', function() {
 
     beforeEach(function() {
-      view = App.TableView.create(App.UserPref, {
+      view = App.TableView.create(App.Persist, {
         controller: Em.Object.create({}),
         displayLength: 10,
         startIndex: 1,
@@ -88,7 +88,7 @@ describe('App.TableView', function () {
   describe('#endIndex', function() {
 
     beforeEach(function() {
-      view = App.TableView.create(App.UserPref, {
+      view = App.TableView.create(App.Persist, {
         controller: Em.Object.create({}),
         displayLength: 10,
         startIndex: 1,
@@ -130,7 +130,7 @@ describe('App.TableView', function () {
   describe('#pageContent', function() {
 
     beforeEach(function() {
-      view = App.TableView.create(App.UserPref, {
+      view = App.TableView.create(App.Persist, {
         controller: Em.Object.create({}),
         displayLength: 10,
         startIndex: 1,
@@ -175,7 +175,7 @@ describe('App.TableView', function () {
   describe('#filtersUsedCalc', function() {
 
     beforeEach(function() {
-      view = App.TableView.create(App.UserPref, {
+      view = App.TableView.create(App.Persist, {
         controller: Em.Object.create({}),
         displayLength: 10,
         startIndex: 1,
@@ -209,7 +209,7 @@ describe('App.TableView', function () {
   describe('#nextPage', function() {
 
     beforeEach(function() {
-      view = App.TableView.create(App.UserPref, {
+      view = App.TableView.create(App.Persist, {
         controller: Em.Object.create({}),
         displayLength: 10,
         startIndex: 1,
@@ -249,7 +249,7 @@ describe('App.TableView', function () {
   describe('#previousPage', function() {
 
     beforeEach(function() {
-      view = App.TableView.create(App.UserPref, {
+      view = App.TableView.create(App.Persist, {
         controller: Em.Object.create({}),
         displayLength: 10,
         startIndex: 50,

http://git-wip-us.apache.org/repos/asf/ambari/blob/1352fa90/ambari-web/test/views/main/dashboard/widgets_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets_test.js b/ambari-web/test/views/main/dashboard/widgets_test.js
index 4700ac8..6e8a6eb 100644
--- a/ambari-web/test/views/main/dashboard/widgets_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets_test.js
@@ -19,7 +19,7 @@
 
 var App = require('app');
 require('messages');
-require('mixins/common/userPref');
+require('mixins/common/persist');
 require('mixins/common/localStorage');
 require('views/main/dashboard/widgets');
 


[20/41] ambari git commit: AMBARI-20653.Bad user experience in workflow credential creation.(M Madhan Mohan Reddy via padmapriyanitt)

Posted by ao...@apache.org.
AMBARI-20653.Bad user experience in workflow credential creation.(M Madhan Mohan Reddy via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/13729ed2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/13729ed2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/13729ed2

Branch: refs/heads/branch-3.0-perf
Commit: 13729ed2c6918e1d183dcf779e6b55c7123c1325
Parents: 632ba71
Author: padmapriyanitt <pa...@gmail.com>
Authored: Tue Apr 4 11:55:13 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../ui/app/components/workflow-credentials.js          |  2 +-
 .../src/main/resources/ui/app/styles/app.less          |  3 ---
 .../ui/app/templates/components/credentials-config.hbs |  2 +-
 .../app/templates/components/workflow-credentials.hbs  | 13 ++++++++-----
 4 files changed, 10 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/13729ed2/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-credentials.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-credentials.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-credentials.js
index 1072ca4..1f1fc04 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-credentials.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-credentials.js
@@ -23,7 +23,7 @@ export default Ember.Component.extend(Ember.Evented, {
   initialize : function(){
     this.get('credentialsList').clear();
     this.get('childComponents').clear();
-    this.set('credentialsList', Ember.copy(this.get('workflowCredentials')));
+    this.set('credentialsList', this.get('workflowCredentials'));
   }.on('init'),
   rendered : function(){
     this.$('#workflow_credentials_dialog').modal({

http://git-wip-us.apache.org/repos/asf/ambari/blob/13729ed2/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
index 673ae6a..3c5e720 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
@@ -1267,9 +1267,6 @@ input:invalid {
     color: @defaultRed;
     cursor: pointer;
 }
-.credential-list {
-  margin-top: 37px;
-}
 .credential-list .fa-trash-o{
   color: @defaultRed;
   cursor: pointer;

http://git-wip-us.apache.org/repos/asf/ambari/blob/13729ed2/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/credentials-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/credentials-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/credentials-config.hbs
index 366920d..555bf98 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/credentials-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/credentials-config.hbs
@@ -66,7 +66,7 @@
         </div>
       </div>
       <div class="col-xs-24 pull-right">
-        <input class="btn btn-default marginright5" type="button" {{action "cancel"}} value="Cancel">
+        <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
         {{#if (eq mode 'create')}}
           <input class="btn btn-primary" type="button" {{action "add"}} value="Add">
         {{else}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/13729ed2/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-credentials.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-credentials.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-credentials.hbs
index 464221f..fa47612 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-credentials.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-credentials.hbs
@@ -24,13 +24,16 @@
       </div>
       <div class="modal-body">
         <div class=" panel panel-default">
-          <div class="panel-heading">Credentials</div>
-          <div class="panel-body handlerPanel">
-            <div id="credential-action-bar" class="form-group pull-right">
+          <div class="panel-heading clearfix">
+            <div class="pull-left paddingtop7">Credentials</div>
+            <div id="credential-action-bar" class="pull-right">
               <button id="dataset-create-btn" {{action 'createCredentials'}} type="button" class="btn btn-default">
                 <i class="fa fa-plus-circle"></i> Add
               </button>
             </div>
+          </div>
+          <div class="panel-body handlerPanel">
+
             <div class="paddingtop10">
               <ul class="list-group credential-list">
                 {{#each credentialsList as |credential index|}}
@@ -57,10 +60,10 @@
           {{#credentials-config credential=currentCredentials showCredentials="showCredentials" update="updateCredentials" mode="edit" cancel="cancelEditMode"}}{{/credentials-config}}
         {{/if}}
       </div>
-      <div class="modal-footer">
+      <!-- <div class="modal-footer">
         <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
         <button type="button" class="btn btn-primary" {{action "saveCredentials"}}>Save</button>
-      </div>
+      </div> -->
     </div>
   </div>
 </div>


[38/41] ambari git commit: AMBARI-20657. Usability: screen jumps when you scroll down (pallavkul)

Posted by ao...@apache.org.
AMBARI-20657. Usability: screen jumps when you scroll down (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f6fbe4ba
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f6fbe4ba
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f6fbe4ba

Branch: refs/heads/branch-3.0-perf
Commit: f6fbe4ba3660eb3659245cfb16f509f65a86b2aa
Parents: 8a2dfa4
Author: pallavkul <pa...@gmail.com>
Authored: Wed Apr 5 11:17:27 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../src/main/resources/ui/app/styles/app.scss   | 23 ++++++++++--------
 .../ui/app/styles/bootstrap-overrides.scss      | 25 ++++++++++----------
 .../resources/ui/app/templates/application.hbs  |  2 +-
 3 files changed, 27 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f6fbe4ba/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index 650c1d6..c06e65e 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -38,7 +38,10 @@
 .main-wrapper {
   padding: 5px 0;
   background-color: #fff;
-  min-height: 100vh;
+}
+
+.top-application-header{
+  background-color: #e1e1e0;
 }
 
 .fa-1-5{
@@ -49,7 +52,7 @@
   font-size: 2em;
 }
 
-$database-search-background: lighten($body-bg, 10%);
+$database-search-background: lighten($base-bg, 10%);
 $database-search-text-color: $gray;
 .database-search, .multiple-database-search {
   color:  $database-search-text-color;
@@ -128,7 +131,7 @@ $database-search-text-color: $gray;
   }
 }
 
-$list-filter-header-background: lighten($body-bg, 10%);
+$list-filter-header-background: lighten($base-bg, 10%);
 $list-filter-text-color: $gray;
 
 .list-filter {
@@ -152,7 +155,7 @@ $list-filter-text-color: $gray;
 
 .table-list {
   .list-group-item {
-    background-color: lighten($body-bg, 10%);
+    background-color: lighten($base-bg, 10%);
   }
 }
 
@@ -174,10 +177,10 @@ $list-filter-text-color: $gray;
   }
 }
 
-$table-info-background: lighten($body-bg, 10%);
+$table-info-background: lighten($base-bg, 10%);
 .table-info {
 
-  background-color: $body-bg;
+  background-color: $base-bg;
   .table-header {
     border: 1px solid darken($table-info-background, 15%);
     p {
@@ -218,7 +221,7 @@ pre {
 }
 
 .scroll-fix {
-  background-color: lighten($body-bg, 5%);
+  background-color: lighten($base-bg, 5%);
   height: calc(100vh - 180px);
   overflow-y: scroll;
 }
@@ -718,8 +721,8 @@ pre {
 }
 
 .jobs-status {
-  border-top: 1px solid darken($body-bg, 10%);
-  border-bottom: 1px solid darken($body-bg, 10%);
+  border-top: 1px solid darken($base-bg, 10%);
+  border-bottom: 1px solid darken($base-bg, 10%);
   .strip {
     margin-top: 20px;
   }
@@ -969,4 +972,4 @@ rect.operator__box {
   position:relative;
   bottom:10px;
   right:-15px;
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6fbe4ba/contrib/views/hive20/src/main/resources/ui/app/styles/bootstrap-overrides.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/bootstrap-overrides.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/bootstrap-overrides.scss
index 4b7bec4..d3cf36e 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/bootstrap-overrides.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/bootstrap-overrides.scss
@@ -18,7 +18,8 @@
 
 $font-family-sans-serif: 'Roboto', "Helvetica Neue", Helvetica, Arial, sans-serif;
 
-$body-bg: #e1e1e0;
+$body-bg: #FFF;
+$base-bg: #e1e1e0;
 $border-radius-base:        3px;
 $border-radius-large:       4px;
 $border-radius-small:       2px;
@@ -26,10 +27,10 @@ $border-radius-small:       2px;
 
 // nav-tabs
 $nav-link-padding:                          10px 20px !default;
-$nav-tabs-border-color:                     darken($body-bg, 15%);
-$nav-tabs-link-hover-border-color:          darken($body-bg, 15%);
+$nav-tabs-border-color:                     darken($base-bg, 15%);
+$nav-tabs-link-hover-border-color:          darken($base-bg, 15%);
 $nav-tabs-active-link-hover-bg:             #fff;
-$nav-tabs-active-link-hover-border-color:   darken($body-bg, 15%);
+$nav-tabs-active-link-hover-border-color:   darken($base-bg, 15%);
 
 @import 'bootstrap';
 
@@ -40,18 +41,18 @@ $nav-tabs-active-link-hover-border-color:   darken($body-bg, 15%);
          &.active {
            background-color: $gray-dark;
            color: #fff;
-           border: 1px solid darken($body-bg, 15%);
+           border: 1px solid darken($base-bg, 15%);
            border-bottom: none;
            &.ember-transitioning-out {
-             background-color: $body-bg;
-             color: darken($body-bg, 50%);
+             background-color: $base-bg;
+             color: darken($base-bg, 50%);
              border: none;
            }
          }
          &.ember-transitioning-in {
            background-color: $gray-dark;
            color: #fff;
-           border: 1px solid darken($body-bg, 15%);
+           border: 1px solid darken($base-bg, 15%);
            border-bottom: none;
          }
        }
@@ -60,19 +61,19 @@ $nav-tabs-active-link-hover-border-color:   darken($body-bg, 15%);
   li {
     a {
       font-weight: bold;
-      color: darken($body-bg, 50%);
+      color: darken($base-bg, 50%);
       &.active {
         background-color: #fff;
-        border: 1px solid darken($body-bg, 15%);
+        border: 1px solid darken($base-bg, 15%);
         border-bottom: none;
         &.ember-transitioning-out {
-          background-color: $body-bg;
+          background-color: $base-bg;
           border: none;
         }
       }
       &.ember-transitioning-in {
         background-color: #fff;
-        border: 1px solid darken($body-bg, 15%);
+        border: 1px solid darken($base-bg, 15%);
         border-bottom: none;
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6fbe4ba/contrib/views/hive20/src/main/resources/ui/app/templates/application.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/application.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/application.hbs
index 53b2192..86c23a4 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/application.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/application.hbs
@@ -22,7 +22,7 @@
       {{alert-message flash=flash}}
     {{/each}}
   </div>
-  <div class="row">
+  <div class="row  top-application-header">
     <div class="col-md-12">
       {{#if serviceCheckCompleted}}
         {{top-application-bar}}


[15/41] ambari git commit: AMBARI-20669 Enable values binding for data attribute helper. (ababiichuk)

Posted by ao...@apache.org.
AMBARI-20669 Enable values binding for data attribute helper. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b38ba227
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b38ba227
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b38ba227

Branch: refs/heads/branch-3.0-perf
Commit: b38ba2277f77c18bdee4e94eb1ed3ff7ad7e0cb7
Parents: 14d2581
Author: ababiichuk <ab...@hortonworks.com>
Authored: Tue Apr 4 17:26:03 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/utils/helper.js | 60 ++++++++++++++++++++++++++-----------
 1 file changed, 42 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b38ba227/ambari-web/app/utils/helper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/helper.js b/ambari-web/app/utils/helper.js
index 30c0762..03a2e82 100644
--- a/ambari-web/app/utils/helper.js
+++ b/ambari-web/app/utils/helper.js
@@ -374,27 +374,51 @@ Em.Handlebars.registerHelper('highlight', function (property, words, fn) {
  * <div {{QAAttr "someText-and-{someProperty::another-text}"}}></div>
  *
  */
-Em.Handlebars.registerHelper('QAAttr', function(text, data) {
-  var self = this;
-  var textToReplace = text.match(/\{(.*?)\}/g);
+Em.Handlebars.registerHelper('QAAttr', function (text, options) {
+  const textToReplace = text.match(/\{(.*?)\}/g);
+  let attributes;
   if (textToReplace) {
-    textToReplace.forEach(function (t) {
-      var value,
-        expression = t.slice(1, t.length - 1),
-        conditionals = Em.View._parsePropertyPath(expression);
-      if (conditionals.classNames) {
-        var sourceValue = Em.Handlebars.getPath(self, conditionals.path, data);
-        value = sourceValue ? conditionals.className : conditionals.falsyClassName;
-      } else {
-        value = Em.Handlebars.getPath(self, expression, data);
-      }
-      if (Em.isNone(value)) {
-        value = '';
-      }
-      text = text.replace(t, value);
+    const id = ++Em.$.uuid,
+      expressions = textToReplace.map((str) => {
+        const parsed = Em.View._parsePropertyPath(str.slice(1, str.length - 1)),
+          normalized = Ember.Handlebars.normalizePath(this, parsed.path, options.data),
+          {classNames, className, falsyClassName} = parsed,
+          {root, path} = normalized;
+        return {src: str, classNames, className, falsyClassName, root, path};
+      }),
+      observer = () => {
+        let dataQA = text;
+        for (let i = expressions.length; i--;) {
+          const el = Em.tryInvoke(options.data.view, '$', [`[${attributes}]`]);
+          let e = expressions[i];
+          if (!el || el.length === 0) {
+            Em.removeObserver(e.root, e.path, invoker);
+            break;
+          }
+          let value,
+            sourceValue = Em.Handlebars.getPath(e.root, e.path, options.data);
+          if (e.classNames) {
+            value = sourceValue ? e.className : e.falsyClassName;
+          } else {
+            value = sourceValue;
+          }
+          if (Em.isNone(value)) {
+            value = '';
+          }
+          dataQA = dataQA.replace(e.src, value);
+          el.attr('data-qa', dataQA);
+        }
+      },
+      invoker = () => Em.run.once(observer);
+    attributes = `data-qa-bind-id="${id}"`;
+    expressions.forEach((e) => {
+      Em.addObserver(e.root, e.path, invoker);
     });
+    Em.run.next(observer);
+  } else {
+    attributes = `data-qa="${text}"`;
   }
-  return new Em.Handlebars.SafeString('data-qa="' + text + '"');
+  return new Em.Handlebars.SafeString(attributes);
 });
 
 /**


[10/41] ambari git commit: AMBARI-19149. Code cleanup: StringBuffer

Posted by ao...@apache.org.
AMBARI-19149. Code cleanup: StringBuffer


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4f5ac096
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4f5ac096
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4f5ac096

Branch: refs/heads/branch-3.0-perf
Commit: 4f5ac0961ab7cec4c765299e9a7f90107ac85119
Parents: 9733808
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Mon Apr 3 14:22:05 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../java/org/apache/ambari/server/events/HostsRemovedEvent.java    | 2 +-
 .../ambari/server/security/encryption/CredentialProvider.java      | 2 +-
 .../apache/ambari/server/state/stack/StackRoleCommandOrder.java    | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5ac096/ambari-server/src/main/java/org/apache/ambari/server/events/HostsRemovedEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/HostsRemovedEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/HostsRemovedEvent.java
index 4145860..5d59b14 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/HostsRemovedEvent.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/HostsRemovedEvent.java
@@ -79,7 +79,7 @@ public class HostsRemovedEvent extends AmbariEvent {
    */
   @Override
   public String toString() {
-    final StringBuffer sb = new StringBuffer("HostsRemovedEvent{");
+    final StringBuilder sb = new StringBuilder("HostsRemovedEvent{");
     sb.append("m_clusters=").append(m_clusters);
     sb.append(", m_hosts=").append(m_hosts);
     sb.append('}');

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5ac096/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/CredentialProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/CredentialProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/CredentialProvider.java
index 0521aa1..de74ad0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/CredentialProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/encryption/CredentialProvider.java
@@ -93,7 +93,7 @@ public class CredentialProvider {
   }
 
   private String generatePassword(int length) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     Random r = new Random();
     for (int i = 0; i < length; i++) {
       sb.append(chars[r.nextInt(chars.length)]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5ac096/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java
index 9f9ded6..5e245af 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/StackRoleCommandOrder.java
@@ -152,7 +152,7 @@ public class StackRoleCommandOrder {
 		for (String depKey : deps.keySet()) {
 		  Object depValue = deps.get(depKey);
 		  if (depValue instanceof Collection) {
-			StringBuffer buffer = new StringBuffer();
+			StringBuilder buffer = new StringBuilder();
 			for (Object o : ((Collection) depValue)) {
 				if (buffer.length() > 0) {
           buffer.append(",");


[06/41] ambari git commit: AMBARI-20596. Cleanup temporary files needed for downloading client configurations response -- fix checkstyle error (Attila Magyar via adoroszlai)

Posted by ao...@apache.org.
AMBARI-20596. Cleanup temporary files needed for downloading client configurations response -- fix checkstyle error (Attila Magyar via adoroszlai)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e0412e86
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e0412e86
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e0412e86

Branch: refs/heads/branch-3.0-perf
Commit: e0412e86af1f5f395b525621ee60bb725aeef980
Parents: 8bef3b4
Author: Attila Magyar <am...@hortonworks.com>
Authored: Mon Apr 3 14:05:05 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../controller/internal/ClientConfigResourceProviderTest.java       | 1 -
 1 file changed, 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e0412e86/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java
index c2ee4d6..e7c1588 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java
@@ -76,7 +76,6 @@ import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.UserGroupInfo;
 import org.apache.ambari.server.state.ValueAttributesInfo;
 import org.apache.ambari.server.utils.StageUtils;
-import org.apache.commons.io.FileUtils;
 import org.easymock.EasyMock;
 import org.junit.Assert;
 import org.junit.Test;


[18/41] ambari git commit: AMBARI-20648. Ambari Agent Distro/Conf Select Versions alert is not functioning correctly since missing format index when using Python 2.6 (alejandro)

Posted by ao...@apache.org.
AMBARI-20648. Ambari Agent Distro/Conf Select Versions alert is not functioning correctly since missing format index when using Python 2.6 (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6e5c12f6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6e5c12f6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6e5c12f6

Branch: refs/heads/branch-3.0-perf
Commit: 6e5c12f670fb372d699af8b96ebd7cda77c20334
Parents: 038f637
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Fri Mar 31 14:39:30 2017 -0700
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../main/resources/host_scripts/alert_version_select.py   | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6e5c12f6/ambari-server/src/main/resources/host_scripts/alert_version_select.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/host_scripts/alert_version_select.py b/ambari-server/src/main/resources/host_scripts/alert_version_select.py
index 118911f..0ce79e7 100644
--- a/ambari-server/src/main/resources/host_scripts/alert_version_select.py
+++ b/ambari-server/src/main/resources/host_scripts/alert_version_select.py
@@ -70,7 +70,7 @@ def execute(configurations={}, parameters={}, host_name=None):
     stack_tools_str = configurations[STACK_TOOLS]
 
     if stack_tools_str is None:
-      return (RESULT_STATE_UNKNOWN, ['{} is a required parameter for the script and the value is null'.format(STACK_TOOLS)])
+      return (RESULT_STATE_UNKNOWN, ['{0} is a required parameter for the script and the value is null'.format(STACK_TOOLS)])
 
     distro_select = "unknown-distro-select"
     try:
@@ -87,18 +87,18 @@ def execute(configurations={}, parameters={}, host_name=None):
       (code, out, versions) = unsafe_get_stack_versions()
 
       if code == 0:
-        msg.append("Ok. {}".format(distro_select))
+        msg.append("Ok. {0}".format(distro_select))
         if versions is not None and type(versions) is list and len(versions) > 0:
-          msg.append("Versions: {}".format(", ".join(versions)))
+          msg.append("Versions: {0}".format(", ".join(versions)))
         return (RESULT_STATE_OK, ["\n".join(msg)])
       else:
-        msg.append("Failed, check dir {} for unexpected contents.".format(stack_root_dir))
+        msg.append("Failed, check dir {0} for unexpected contents.".format(stack_root_dir))
         if out is not None:
           msg.append(out)
 
         return (RESULT_STATE_CRITICAL, ["\n".join(msg)])
     else:
-      msg.append("Ok. No stack root {} to check.".format(stack_root_dir))
+      msg.append("Ok. No stack root {0} to check.".format(stack_root_dir))
       return (RESULT_STATE_OK, ["\n".join(msg)])
   except Exception, e:
     return (RESULT_STATE_CRITICAL, [e.message])


[13/41] ambari git commit: AMBARI-20579. Ambari-server failed to stop (aonishuk)

Posted by ao...@apache.org.
AMBARI-20579. Ambari-server failed to stop (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/314e41eb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/314e41eb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/314e41eb

Branch: refs/heads/branch-3.0-perf
Commit: 314e41ebe06b70c4bf99eb70f383f81d6e5e2ea2
Parents: d972592
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Apr 3 17:33:11 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 ambari-server/src/main/python/ambari-server.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/314e41eb/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index c985d65..87cc6c2 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -175,10 +175,10 @@ def stop(args):
     logger.info("Waiting for server stop...")
 
     if not wait_for_server_to_stop(SERVER_STOP_TIMEOUT):
-      err = "Ambari-server failed to stop"
+      err = "Ambari-server failed to stop gracefully. Sending SIGKILL to it"
       print err
       logger.error(err)
-      raise FatalException(1, err)
+      os.kill(pid, signal.SIGKILL)
 
     pid_file_path = os.path.join(configDefaults.PID_DIR, PID_NAME)
     os.remove(pid_file_path)


[08/41] ambari git commit: AMBARI-20654.Workflow should retain job.properties in submission modal window.(M Madhan Mohan Reddy via padmapriyanitt)

Posted by ao...@apache.org.
AMBARI-20654.Workflow should retain job.properties in submission modal window.(M Madhan Mohan Reddy via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5c91e400
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5c91e400
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5c91e400

Branch: refs/heads/branch-3.0-perf
Commit: 5c91e4003c75d6c71f58c18501791dbeba91418f
Parents: 27da4c8
Author: padmapriyanitt <pa...@gmail.com>
Authored: Mon Apr 3 18:21:47 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../src/main/resources/ui/app/components/flow-designer.js     | 1 +
 .../src/main/resources/ui/app/components/job-config.js        | 7 +++++++
 .../resources/ui/app/templates/components/flow-designer.hbs   | 2 +-
 3 files changed, 9 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5c91e400/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index 5f885e7..fa7c861 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -114,6 +114,7 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   showingStreamImport:false,
   fileInfo:Ember.Object.create(),
   isDraft: false,
+  jobConfigProperties: Ember.A([]),
   saveJobService : Ember.inject.service('save-job'),
   initialize : function(){
     var id = 'cy-' + Math.ceil(Math.random() * 1000);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5c91e400/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
index 15c1fbf..e9c7c15 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
@@ -68,6 +68,12 @@ export default Ember.Component.extend(Validations, {
     var configProperties = [];
     configProperties.pushObjects(this.extractJobParams());
     configProperties.pushObjects(this.extractJobProperties());
+    configProperties.forEach((configProperty)=>{
+      var oldConfigProp = this.jobConfigProperties.filterBy('name', configProperty.name);
+      if (oldConfigProp.length > 0) {
+          configProperty.value = oldConfigProp[0].value;
+      }
+    }, this);
     return configProperties;
   }),
   initialize :function(){
@@ -174,6 +180,7 @@ export default Ember.Component.extend(Validations, {
       return;
     };
     this.set('jobFilePath', Ember.copy(this.get('filePath')));
+    this.set("jobConfigProperties", Ember.copy(this.get("configMap")));
     var url = Ember.ENV.API_URL + "/submitJob?app.path=" + this.get("filePath") + "&overwrite=" + this.get("overwritePath");
     url = url + "&jobType=" + this.get('displayName').toUpperCase();
     var submitConfigs = this.get("configMap");

http://git-wip-us.apache.org/repos/asf/ambari/blob/5c91e400/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index 0da9f71..429e874 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -273,7 +273,7 @@
   {{save-wf type='wf' close="closeSaveWorkflow" jobFilePath=workflowFilePath openFileBrowser="openFileBrowser" closeFileBrowser="closeFileBrowser" jobConfigs=configForSave}}
 {{/if}}
 {{#if showingWorkflowConfigProps}}
-  {{job-config type='wf' closeJobConfigs="closeWorkflowSubmitConfigs" jobFilePath=workflowFilePath tabInfo=tabInfo openFileBrowser="openFileBrowser" closeFileBrowser="closeFileBrowser" jobConfigs=workflowSubmitConfigs isDryrun=dryrun}}
+  {{job-config type='wf' closeJobConfigs="closeWorkflowSubmitConfigs" jobFilePath=workflowFilePath tabInfo=tabInfo openFileBrowser="openFileBrowser" closeFileBrowser="closeFileBrowser" jobConfigs=workflowSubmitConfigs isDryrun=dryrun jobConfigProperties=jobConfigProperties}}
 {{/if}}
 {{#if showGlobalConfig}}
   {{#global-config closeGlobalConfig="closeWorkflowGlobalProps" saveGlobalConfig="saveGlobalConfig" actionModel=globalConfig}}{{/global-config}}


[40/41] ambari git commit: AMBARI-20684. Implement a websocket adapter for stomp.py (aonishuk)

Posted by ao...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/utf8validator.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/utf8validator.py b/ambari-common/src/main/python/ambari_ws4py/utf8validator.py
new file mode 100644
index 0000000..50b19e5
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/utf8validator.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+
+###############################################################################
+##
+##  Copyright 2011 Tavendo GmbH
+##
+##  Note:
+##
+##  This code is a Python implementation of the algorithm
+##
+##            "Flexible and Economical UTF-8 Decoder"
+##
+##  by Bjoern Hoehrmann
+##
+##       bjoern@hoehrmann.de
+##       http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+
+class Utf8Validator(object):
+    """
+    Incremental UTF-8 validator with constant memory consumption (minimal state).
+
+    Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
+    Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/).
+    """
+
+    ## DFA transitions
+    UTF8VALIDATOR_DFA = [
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 00..1f
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 20..3f
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 40..5f
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 60..7f
+        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, # 80..9f
+        7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, # a0..bf
+        8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, # c0..df
+        0xa,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x4,0x3,0x3, # e0..ef
+        0xb,0x6,0x6,0x6,0x5,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8, # f0..ff
+        0x0,0x1,0x2,0x3,0x5,0x8,0x7,0x1,0x1,0x1,0x4,0x6,0x1,0x1,0x1,0x1, # s0..s0
+        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1, # s1..s2
+        1,2,1,1,1,1,1,2,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1, # s3..s4
+        1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,3,1,3,1,1,1,1,1,1, # s5..s6
+        1,3,1,1,1,1,1,3,1,3,1,1,1,1,1,1,1,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1, # s7..s8
+    ]
+
+    UTF8_ACCEPT = 0
+    UTF8_REJECT = 1
+
+    def __init__(self):
+        self.reset()
+
+    def decode(self, b):
+        """
+        Eat one UTF-8 octet, and validate on the fly.
+
+        Returns UTF8_ACCEPT when enough octets have been consumed, in which case
+        self.codepoint contains the decoded Unicode code point.
+
+        Returns UTF8_REJECT when invalid UTF-8 was encountered.
+
+        Returns some other positive integer when more octets need to be eaten.
+        """
+        type = Utf8Validator.UTF8VALIDATOR_DFA[b]
+        if self.state != Utf8Validator.UTF8_ACCEPT:
+            self.codepoint = (b & 0x3f) | (self.codepoint << 6)
+        else:
+            self.codepoint = (0xff >> type) & b
+        self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + self.state * 16 + type]
+        return self.state
+
+    def reset(self):
+        """
+        Reset validator to start new incremental UTF-8 decode/validation.
+        """
+        self.state = Utf8Validator.UTF8_ACCEPT
+        self.codepoint = 0
+        self.i = 0
+
+    def validate(self, ba):
+        """
+        Incrementally validate a chunk of bytes provided as bytearray.
+
+        Will return a quad (valid?, endsOnCodePoint?, currentIndex, totalIndex).
+
+        As soon as an octet is encountered which renders the octet sequence
+        invalid, a quad with valid? == False is returned. currentIndex returns
+        the index within the currently consumed chunk, and totalIndex the
+        index within the total consumed sequence that was the point of bail out.
+        When valid? == True, currentIndex will be len(ba) and totalIndex the
+        total amount of consumed bytes.
+        """
+        state = self.state
+        DFA = Utf8Validator.UTF8VALIDATOR_DFA
+        i = 0  # make sure 'i' is set if when 'ba' is empty
+        for i, b in enumerate(ba):
+            ## optimized version of decode(), since we are not interested in actual code points
+            state = DFA[256 + (state << 4) + DFA[b]]
+            if state == Utf8Validator.UTF8_REJECT:
+                self.i += i
+                self.state = state
+                return False, False, i, self.i
+        self.i += i
+        self.state = state
+        return True, state == Utf8Validator.UTF8_ACCEPT, i, self.i

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-common/src/main/python/ambari_ws4py/websocket.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_ws4py/websocket.py b/ambari-common/src/main/python/ambari_ws4py/websocket.py
new file mode 100644
index 0000000..b5c1fd3
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_ws4py/websocket.py
@@ -0,0 +1,535 @@
+# -*- coding: utf-8 -*-
+import logging
+import socket
+import ssl
+import time
+import threading
+import types
+import errno
+
+try:
+    from OpenSSL.SSL import Error as pyOpenSSLError
+except ImportError:
+    class pyOpenSSLError(Exception):
+        pass
+
+from ambari_ws4py import WS_KEY, WS_VERSION
+from ambari_ws4py.exc import HandshakeError, StreamClosed
+from ambari_ws4py.streaming import Stream
+from ambari_ws4py.messaging import Message, PingControlMessage,\
+    PongControlMessage
+from ambari_ws4py.compat import basestring, unicode
+
+DEFAULT_READING_SIZE = 2
+
+logger = logging.getLogger('ambari_ws4py')
+
+__all__ = ['WebSocket', 'EchoWebSocket', 'Heartbeat']
+
+class Heartbeat(threading.Thread):
+    def __init__(self, websocket, frequency=2.0):
+        """
+        Runs at a periodic interval specified by
+        `frequency` by sending an unsolicitated pong
+        message to the connected peer.
+
+        If the message fails to be sent and a socket
+        error is raised, we close the websocket
+        socket automatically, triggering the `closed`
+        handler.
+        """
+        threading.Thread.__init__(self)
+        self.websocket = websocket
+        self.frequency = frequency
+
+    def __enter__(self):
+        if self.frequency:
+            self.start()
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_tb):
+        self.stop()
+
+    def stop(self):
+        self.running = False
+
+    def run(self):
+        self.running = True
+        while self.running:
+            time.sleep(self.frequency)
+            if self.websocket.terminated:
+                break
+
+            try:
+                self.websocket.send(PongControlMessage(data='beep'))
+            except socket.error:
+                logger.info("Heartbeat failed")
+                self.websocket.server_terminated = True
+                self.websocket.close_connection()
+                break
+
+class WebSocket(object):
+    """ Represents a websocket endpoint and provides a high level interface to drive the endpoint. """
+
+    def __init__(self, sock, protocols=None, extensions=None, environ=None, heartbeat_freq=None):
+        """ The ``sock`` is an opened connection
+        resulting from the websocket handshake.
+
+        If ``protocols`` is provided, it is a list of protocols
+        negotiated during the handshake as is ``extensions``.
+
+        If ``environ`` is provided, it is a copy of the WSGI environ
+        dictionnary from the underlying WSGI server.
+        """
+
+        self.stream = Stream(always_mask=False)
+        """
+        Underlying websocket stream that performs the websocket
+        parsing to high level objects. By default this stream
+        never masks its messages. Clients using this class should
+        set the ``stream.always_mask`` fields to ``True``
+        and ``stream.expect_masking`` fields to ``False``.
+        """
+
+        self.protocols = protocols
+        """
+        List of protocols supported by this endpoint.
+        Unused for now.
+        """
+
+        self.extensions = extensions
+        """
+        List of extensions supported by this endpoint.
+        Unused for now.
+        """
+
+        self.sock = sock
+        """
+        Underlying connection.
+        """
+
+        self._is_secure = hasattr(sock, '_ssl') or hasattr(sock, '_sslobj')
+        """
+        Tell us if the socket is secure or not.
+        """
+
+        self.client_terminated = False
+        """
+        Indicates if the client has been marked as terminated.
+        """
+
+        self.server_terminated = False
+        """
+        Indicates if the server has been marked as terminated.
+        """
+
+        self.reading_buffer_size = DEFAULT_READING_SIZE
+        """
+        Current connection reading buffer size.
+        """
+
+        self.environ = environ
+        """
+        WSGI environ dictionary.
+        """
+
+        self.heartbeat_freq = heartbeat_freq
+        """
+        At which interval the heartbeat will be running.
+        Set this to `0` or `None` to disable it entirely.
+        """
+        "Internal buffer to get around SSL problems"
+        self.buf = b''
+
+        self._local_address = None
+        self._peer_address = None
+
+    @property
+    def local_address(self):
+        """
+        Local endpoint address as a tuple
+        """
+        if not self._local_address:
+            self._local_address = self.sock.getsockname()
+            if len(self._local_address) == 4:
+                self._local_address = self._local_address[:2]
+        return self._local_address
+
+    @property
+    def peer_address(self):
+        """
+        Peer endpoint address as a tuple
+        """
+        if not self._peer_address:
+            self._peer_address = self.sock.getpeername()
+            if len(self._peer_address) == 4:
+                self._peer_address = self._peer_address[:2]
+        return self._peer_address
+
+    def opened(self):
+        """
+        Called by the server when the upgrade handshake
+        has succeeded.
+        """
+        pass
+
+    def close(self, code=1000, reason=''):
+        """
+        Call this method to initiate the websocket connection
+        closing by sending a close frame to the connected peer.
+        The ``code`` is the status code representing the
+        termination's reason.
+
+        Once this method is called, the ``server_terminated``
+        attribute is set. Calling this method several times is
+        safe as the closing frame will be sent only the first
+        time.
+
+        .. seealso:: Defined Status Codes http://tools.ietf.org/html/rfc6455#section-7.4.1
+        """
+        if not self.server_terminated:
+            self.server_terminated = True
+            try:
+                self._write(self.stream.close(code=code, reason=reason).single(mask=self.stream.always_mask))
+            except Exception as ex:
+                logger.error("Error when terminating the connection: %s", str(ex))
+
+    def closed(self, code, reason=None):
+        """
+        Called  when the websocket stream and connection are finally closed.
+        The provided ``code`` is status set by the other point and
+        ``reason`` is a human readable message.
+
+        .. seealso:: Defined Status Codes http://tools.ietf.org/html/rfc6455#section-7.4.1
+        """
+        pass
+
+    @property
+    def terminated(self):
+        """
+        Returns ``True`` if both the client and server have been
+        marked as terminated.
+        """
+        return self.client_terminated is True and self.server_terminated is True
+
+    @property
+    def connection(self):
+        return self.sock
+
+    def close_connection(self):
+        """
+        Shutdowns then closes the underlying connection.
+        """
+        if self.sock:
+            try:
+                self.sock.shutdown(socket.SHUT_RDWR)
+                self.sock.close()
+            except:
+                pass
+            finally:
+                self.sock = None
+
+    def ping(self, message):
+        """
+        Send a ping message to the remote peer.
+        The given `message` must be a unicode string.
+        """
+        self.send(PingControlMessage(message))
+
+    def ponged(self, pong):
+        """
+        Pong message, as a :class:`messaging.PongControlMessage` instance,
+        received on the stream.
+        """
+        pass
+
+    def received_message(self, message):
+        """
+        Called whenever a complete ``message``, binary or text,
+        is received and ready for application's processing.
+
+        The passed message is an instance of :class:`messaging.TextMessage`
+        or :class:`messaging.BinaryMessage`.
+
+        .. note:: You should override this method in your subclass.
+        """
+        pass
+
+    def unhandled_error(self, error):
+        """
+        Called whenever a socket, or an OS, error is trapped
+        by ambari_ws4py but not managed by it. The given error is
+        an instance of `socket.error` or `OSError`.
+
+        Note however that application exceptions will not go
+        through this handler. Instead, do make sure you
+        protect your code appropriately in `received_message`
+        or `send`.
+
+        The default behaviour of this handler is to log
+        the error with a message.
+        """
+        logger.exception("Failed to receive data")
+
+    def _write(self, b):
+        """
+        Trying to prevent a write operation
+        on an already closed websocket stream.
+
+        This cannot be bullet proof but hopefully
+        will catch almost all use cases.
+        """
+        if self.terminated or self.sock is None:
+            raise RuntimeError("Cannot send on a terminated websocket")
+
+        self.sock.sendall(b)
+
+    def send(self, payload, binary=False):
+        """
+        Sends the given ``payload`` out.
+
+        If ``payload`` is some bytes or a bytearray,
+        then it is sent as a single message not fragmented.
+
+        If ``payload`` is a generator, each chunk is sent as part of
+        fragmented message.
+
+        If ``binary`` is set, handles the payload as a binary message.
+        """
+        message_sender = self.stream.binary_message if binary else self.stream.text_message
+
+        if isinstance(payload, basestring) or isinstance(payload, bytearray):
+            m = message_sender(payload).single(mask=self.stream.always_mask)
+            self._write(m)
+
+        elif isinstance(payload, Message):
+            data = payload.single(mask=self.stream.always_mask)
+            self._write(data)
+
+        elif type(payload) == types.GeneratorType:
+            bytes = next(payload)
+            first = True
+            for chunk in payload:
+                self._write(message_sender(bytes).fragment(first=first, mask=self.stream.always_mask))
+                bytes = chunk
+                first = False
+
+            self._write(message_sender(bytes).fragment(first=first, last=True, mask=self.stream.always_mask))
+
+        else:
+            raise ValueError("Unsupported type '%s' passed to send()" % type(payload))
+
+    def _get_from_pending(self):
+        """
+        The SSL socket object provides the same interface
+        as the socket interface but behaves differently.
+
+        When data is sent over a SSL connection
+        more data may be read than was requested from by
+        the ambari_ws4py websocket object.
+
+        In that case, the data may have been indeed read
+        from the underlying real socket, but not read by the
+        application which will expect another trigger from the
+        manager's polling mechanism as if more data was still on the
+        wire. This will happen only when new data is
+        sent by the other peer which means there will be
+        some delay before the initial read data is handled
+        by the application.
+
+        Due to this, we have to rely on a non-public method
+        to query the internal SSL socket buffer if it has indeed
+        more data pending in its buffer.
+
+        Now, some people in the Python community
+        `discourage <https://bugs.python.org/issue21430>`_
+        this usage of the ``pending()`` method because it's not
+        the right way of dealing with such use case. They advise
+        `this approach <https://docs.python.org/dev/library/ssl.html#notes-on-non-blocking-sockets>`_
+        instead. Unfortunately, this applies only if the
+        application can directly control the poller which is not
+        the case with the WebSocket abstraction here.
+
+        We therefore rely on this `technic <http://stackoverflow.com/questions/3187565/select-and-ssl-in-python>`_
+        which seems to be valid anyway.
+
+        This is a bit of a shame because we have to process
+        more data than what wanted initially.
+        """
+        data = b""
+        pending = self.sock.pending()
+        while pending:
+            data += self.sock.recv(pending)
+            pending = self.sock.pending()
+        return data
+
+    def once(self):
+        """
+        Performs the operation of reading from the underlying
+        connection in order to feed the stream of bytes.
+
+        Because this needs to support SSL sockets, we must always
+        read as much as might be in the socket at any given time,
+        however process expects to have itself called with only a certain
+        number of bytes at a time. That number is found in
+        self.reading_buffer_size, so we read everything into our own buffer,
+        and then from there feed self.process.
+
+        Then the stream indicates
+        whatever size must be read from the connection since
+        it knows the frame payload length.
+
+        It returns `False` if an error occurred at the
+        socket level or during the bytes processing. Otherwise,
+        it returns `True`.
+        """
+        if self.terminated:
+            logger.debug("WebSocket is already terminated")
+            return False
+        try:
+            b = self.sock.recv(self.reading_buffer_size)
+            if self._is_secure:
+                b += self._get_from_pending()
+            if not b:
+                return False
+            self.buf += b
+        except (socket.error, OSError, pyOpenSSLError) as e:
+            if hasattr(e, "errno") and e.errno == errno.EINTR:
+                pass
+            else:
+                self.unhandled_error(e)
+                return False
+        else:
+            # process as much as we can
+            # the process will stop either if there is no buffer left
+            # or if the stream is closed
+            if not self.process(self.buf):
+                return False
+            self.buf = b""
+
+        return True
+
+    def terminate(self):
+        """
+        Completes the websocket by calling the `closed`
+        method either using the received closing code
+        and reason, or when none was received, using
+        the special `1006` code.
+
+        Finally close the underlying connection for
+        good and cleanup resources by unsetting
+        the `environ` and `stream` attributes.
+        """
+        s = self.stream
+
+        try:
+            if s.closing is None:
+                self.closed(1006, "Going away")
+            else:
+                self.closed(s.closing.code, s.closing.reason)
+        finally:
+            self.client_terminated = self.server_terminated = True
+            self.close_connection()
+
+            # Cleaning up resources
+            s._cleanup()
+            self.stream = None
+            self.environ = None
+
+    def process(self, bytes):
+        """ Takes some bytes and process them through the
+        internal stream's parser. If a message of any kind is
+        found, performs one of these actions:
+
+        * A closing message will initiate the closing handshake
+        * Errors will initiate a closing handshake
+        * A message will be passed to the ``received_message`` method
+        * Pings will see pongs be sent automatically
+        * Pongs will be passed to the ``ponged`` method
+
+        The process should be terminated when this method
+        returns ``False``.
+        """
+        s = self.stream
+
+        if not bytes and self.reading_buffer_size > 0:
+            return False
+
+        self.reading_buffer_size = s.parser.send(bytes) or DEFAULT_READING_SIZE
+
+        if s.closing is not None:
+            logger.debug("Closing message received (%d) '%s'" % (s.closing.code, s.closing.reason))
+            if not self.server_terminated:
+                self.close(s.closing.code, s.closing.reason)
+            else:
+                self.client_terminated = True
+            return False
+
+        if s.errors:
+            for error in s.errors:
+                logger.debug("Error message received (%d) '%s'" % (error.code, error.reason))
+                self.close(error.code, error.reason)
+            s.errors = []
+            return False
+
+        if s.has_message:
+            self.received_message(s.message)
+            if s.message is not None:
+                s.message.data = None
+                s.message = None
+            return True
+
+        if s.pings:
+            for ping in s.pings:
+                self._write(s.pong(ping.data))
+            s.pings = []
+
+        if s.pongs:
+            for pong in s.pongs:
+                self.ponged(pong)
+            s.pongs = []
+
+        return True
+
+    def run(self):
+        """
+        Performs the operation of reading from the underlying
+        connection in order to feed the stream of bytes.
+
+        We start with a small size of two bytes to be read
+        from the connection so that we can quickly parse an
+        incoming frame header. Then the stream indicates
+        whatever size must be read from the connection since
+        it knows the frame payload length.
+
+        Note that we perform some automatic opererations:
+
+        * On a closing message, we respond with a closing
+          message and finally close the connection
+        * We respond to pings with pong messages.
+        * Whenever an error is raised by the stream parsing,
+          we initiate the closing of the connection with the
+          appropiate error code.
+
+        This method is blocking and should likely be run
+        in a thread.
+        """
+        self.sock.setblocking(True)
+        with Heartbeat(self, frequency=self.heartbeat_freq):
+            s = self.stream
+
+            try:
+                self.opened()
+                while not self.terminated:
+                    if not self.once():
+                        break
+            finally:
+                self.terminate()
+
+class EchoWebSocket(WebSocket):
+    def received_message(self, message):
+        """
+        Automatically sends back the provided ``message`` to
+        its originating endpoint.
+        """
+        self.send(message.data, message.is_binary)

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-project/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-project/pom.xml b/ambari-project/pom.xml
index 98da9f4..5333d9d 100644
--- a/ambari-project/pom.xml
+++ b/ambari-project/pom.xml
@@ -30,7 +30,7 @@
     <solr.version>5.5.2</solr.version>
     <ambari.dir>${project.parent.basedir}</ambari.dir>
     <powermock.version>1.6.3</powermock.version>
-    <jetty.version>8.1.19.v20160209</jetty.version>
+    <jetty.version>9.4.2.v20170220</jetty.version>
     <checkstyle.version>6.19</checkstyle.version> <!-- last version that does not require Java 8 -->
     <forkCount>4</forkCount>
     <reuseForks>false</reuseForks>
@@ -123,17 +123,17 @@
       <dependency>
         <groupId>org.springframework.security</groupId>
         <artifactId>spring-security-core</artifactId>
-        <version>3.1.2.RELEASE</version>
+        <version>4.2.2.RELEASE</version>
       </dependency>
       <dependency>
         <groupId>org.springframework.security</groupId>
         <artifactId>spring-security-config</artifactId>
-        <version>3.1.2.RELEASE</version>
+        <version>4.2.2.RELEASE</version>
       </dependency>
       <dependency>
         <groupId>org.springframework.security</groupId>
         <artifactId>spring-security-web</artifactId>
-        <version>3.1.2.RELEASE</version>
+        <version>4.2.2.RELEASE</version>
       </dependency>
       <dependency>
         <groupId>org.springframework.security.kerberos</groupId>
@@ -293,6 +293,11 @@
         <version>${jetty.version}</version>
       </dependency>
       <dependency>
+        <groupId>org.eclipse.jetty</groupId>
+        <artifactId>jetty-util-ajax</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
+      <dependency>
         <groupId>org.mortbay.jetty</groupId>
         <artifactId>jsp-api-2.1-glassfish</artifactId>
         <version>2.1.v20100127</version>
@@ -303,6 +308,16 @@
         <version>2.1.v20100127</version>
       </dependency>
       <dependency>
+        <groupId>org.eclipse.jetty.websocket</groupId>
+        <artifactId>websocket-servlet</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.eclipse.jetty.websocket</groupId>
+        <artifactId>websocket-server</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
+      <dependency>
         <groupId>org.apache.ant</groupId>
         <artifactId>ant</artifactId>
         <version>1.7.1</version>
@@ -415,6 +430,83 @@
         <version>1.19</version>
       </dependency>
       <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-aop</artifactId>
+        <version>4.3.7.RELEASE</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-jdbc</artifactId>
+        <version>4.3.7.RELEASE</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-core</artifactId>
+        <version>4.3.7.RELEASE</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-context</artifactId>
+        <version>4.3.7.RELEASE</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-context-support</artifactId>
+        <version>4.3.7.RELEASE</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-web</artifactId>
+        <version>4.3.7.RELEASE</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-websocket</artifactId>
+        <version>4.3.7.RELEASE</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-messaging</artifactId>
+        <version>4.3.7.RELEASE</version>
+      </dependency>
+      <dependency>
+        <groupId>org.springframework</groupId>
+        <artifactId>spring-webmvc</artifactId>
+        <version>4.3.7.RELEASE</version>
+      </dependency>
+      <dependency>
+        <groupId>com.sun.jersey.contribs</groupId>
+        <artifactId>jersey-spring</artifactId>
+        <version>1.19</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-core</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-web</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-beans</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-context</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-aop</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
         <groupId>log4j</groupId>
         <artifactId>log4j</artifactId>
         <version>1.2.17</version>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 8b4c8d6..7fc62f2 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -1206,6 +1206,10 @@
       <version>${jetty.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-util-ajax</artifactId>
+    </dependency>
+    <dependency>
       <groupId>org.mortbay.jetty</groupId>
       <artifactId>jsp-api-2.1-glassfish</artifactId>
     </dependency>
@@ -1221,7 +1225,14 @@
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-server</artifactId>
-      <version>${jetty.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty.websocket</groupId>
+      <artifactId>websocket-servlet</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty.websocket</groupId>
+      <artifactId>websocket-server</artifactId>
     </dependency>
     <dependency>
       <groupId>commons-logging</groupId>
@@ -1301,6 +1312,75 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-aop</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-jdbc</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-core</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-context</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-web</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-websocket</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-messaging</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-webmvc</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.sun.jersey.contribs</groupId>
+      <artifactId>jersey-spring</artifactId>
+      <version>1.19</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>spring</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>spring-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>spring-web</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>spring-beans</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>spring-context</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>spring-aop</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
       <groupId>com.sun.jersey.jersey-test-framework</groupId>
       <artifactId>jersey-test-framework-grizzly2</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8de3961b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index f192895..e5cd709 100644
--- a/pom.xml
+++ b/pom.xml
@@ -306,6 +306,8 @@
             <exclude>ambari-common/src/main/python/ambari_simplejson/**</exclude>
             <!--Stomp library (Apache license)-->
             <exclude>ambari-common/src/main/python/ambari_stomp/**</exclude>
+            <!--ws4py library (BSD 3-Clause)-->
+            <exclude>ambari-common/src/main/python/ambari_ws4py/**</exclude>
 
             <exclude>ambari-web/node_modules/**</exclude>
 


[28/41] ambari git commit: AMBARI-20413.The Zoom feature in WFM, hides the WF completely either with Maximum and Min(M Madhan Mohan Reddy via padmapriyanitt)

Posted by ao...@apache.org.
AMBARI-20413.The Zoom feature in WFM, hides the WF completely either with Maximum and Min(M Madhan Mohan Reddy via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/be230392
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/be230392
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/be230392

Branch: refs/heads/branch-3.0-perf
Commit: be2303927e30ccab0ec70ff5823a17204eaf2644
Parents: cc88312
Author: padmapriyanitt <pa...@gmail.com>
Authored: Tue Apr 4 16:26:18 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../resources/ui/app/domain/cytoscape-flow-renderer.js   | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/be230392/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
index bfe59e2..af84f86 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
@@ -48,8 +48,6 @@ var CytoscapeRenderer= Ember.Object.extend({
     };
 
     this.cy.panzoom( defaults );
-    //this.cy.center();
-    this.cy.pan({x:200,y:50});
     this._addEvents(this.cy);
     var self = this;
     this.get("context").$('.overlay-transition-content').popover({
@@ -69,6 +67,14 @@ var CytoscapeRenderer= Ember.Object.extend({
   _setCyOverflow() {
     Ember.set(this.get("cyOverflow"), "overflown", this.cy.elements().renderedBoundingBox().y2 > this.cy.height());
   },
+  _setGraphCenter() {
+    var startDataNode = this.get("dataNodes").filterBy("data.type", "start");
+    if (startDataNode[0] && startDataNode[0].data.id) {
+      var startNode = this.cy.$("#" + startDataNode[0].data.id);
+      this.cy.center();
+      this.cy.pan({y:50});
+    }
+  },
   _getShape(nodeType) {
     switch(nodeType) {
       case 'start' :
@@ -394,6 +400,7 @@ var CytoscapeRenderer= Ember.Object.extend({
     this.cy.endBatch();
     this.cy.layout(this.get("layoutConfigs"));
     this._setCyOverflow();
+    this._setGraphCenter();
   },
 
   initRenderer(callback, settings){


[27/41] ambari git commit: AMBARI-20662.Need to auto populate the workflow parameters if its already defined in the global space.(M Madhan Mohan Reddy via padmapriyanitt)

Posted by ao...@apache.org.
AMBARI-20662.Need to auto populate the workflow parameters if its already defined in the global space.(M Madhan Mohan Reddy via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cc88312e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cc88312e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cc88312e

Branch: refs/heads/branch-3.0-perf
Commit: cc88312e095ae6c025661b82cfb0207ebaceb5de
Parents: 0472e5f
Author: padmapriyanitt <pa...@gmail.com>
Authored: Tue Apr 4 16:21:27 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../wfmanager/src/main/resources/ui/app/components/job-config.js | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/cc88312e/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
index e9c7c15..6aed9da 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
@@ -121,15 +121,17 @@ export default Ember.Component.extend(Validations, {
       if (value!== Constants.defaultNameNodeValue && value!==Constants.rmDefaultValue){
         var propName = value.trim().substring(2, value.length-1);
         var isRequired = true;
+        var val = null;
         if(jobParams && jobParams.configuration && jobParams.configuration.property){
           var param = jobParams.configuration.property.findBy('name', propName);
           if(param && param.value){
             isRequired = false;
+            val = param.value;
           }else {
             isRequired = true;
           }
         }
-        let val = null, tabData = self.get("tabInfo");
+        let tabData = self.get("tabInfo");
         if(tabData && tabData.isImportedFromDesigner && tabData.configuration && tabData.configuration.settings && tabData.configuration.settings.configuration && tabData.configuration.settings.configuration.property) {
           let propVal = tabData.configuration.settings.configuration.property.findBy('name', propName);
           if(propVal) {


[23/41] ambari git commit: AMBARI-20558. HiveView does not work on WASB ACLs cluster (Gaurav Nagar via nitirajrathore)

Posted by ao...@apache.org.
AMBARI-20558. HiveView does not work on WASB ACLs cluster (Gaurav Nagar via nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8c10a07a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8c10a07a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8c10a07a

Branch: refs/heads/branch-3.0-perf
Commit: 8c10a07a4ad2e1825392ff99d4cc4c64620b69b7
Parents: aeb6707
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Tue Apr 4 12:39:08 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java    | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8c10a07a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 66679e8..90fa483 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -66,6 +66,7 @@ public class HdfsApi {
       InterruptedException, HdfsApiException {
     this.authParams = configurationBuilder.buildAuthenticationConfig();
     conf = configurationBuilder.buildConfig();
+    UserGroupInformation.setConfiguration(conf);
     ugi = UserGroupInformation.createProxyUser(username, getProxyUser());
 
     fs = execute(new PrivilegedExceptionAction<FileSystem>() {


[24/41] ambari git commit: AMBARI-20573. Hive view 2.0 fails on LLAP (Gaurav Nagar via nitirajrathore)

Posted by ao...@apache.org.
AMBARI-20573. Hive view 2.0 fails on LLAP (Gaurav Nagar via nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7d4df5b3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7d4df5b3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7d4df5b3

Branch: refs/heads/branch-3.0-perf
Commit: 7d4df5b30819384e7b72fca852b1319244bdab09
Parents: 8c10a07
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Tue Apr 4 12:43:23 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../apache/ambari/server/view/ViewRegistry.java | 51 ++++++++++++++++++--
 1 file changed, 48 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7d4df5b3/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
index 5b58abc..c7b2f79 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
@@ -85,6 +85,7 @@ import org.apache.ambari.server.orm.entities.ViewEntity;
 import org.apache.ambari.server.orm.entities.ViewEntityEntity;
 import org.apache.ambari.server.orm.entities.ViewInstanceDataEntity;
 import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
+import org.apache.ambari.server.orm.entities.ViewInstancePropertyEntity;
 import org.apache.ambari.server.orm.entities.ViewParameterEntity;
 import org.apache.ambari.server.orm.entities.ViewResourceEntity;
 import org.apache.ambari.server.orm.entities.ViewURLEntity;
@@ -93,6 +94,8 @@ import org.apache.ambari.server.security.authorization.AuthorizationHelper;
 import org.apache.ambari.server.security.authorization.ResourceType;
 import org.apache.ambari.server.security.authorization.RoleAuthorization;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.utils.Closeables;
@@ -988,7 +991,7 @@ public class ViewRegistry {
 
         try {
           if (checkAutoInstanceConfig(autoConfig, stackId, event.getServiceName(), serviceNames)) {
-            installAutoInstance(clusterId, clusterName, viewEntity, viewName, viewConfig, autoConfig, roles);
+            installAutoInstance(clusterId, clusterName, cluster.getService(event.getServiceName()), viewEntity, viewName, viewConfig, autoConfig, roles);
           }
         } catch (Exception e) {
           LOG.error("Can't auto create instance of view " + viewName + " for cluster " + clusterName +
@@ -1000,9 +1003,10 @@ public class ViewRegistry {
     }
   }
 
-  private void installAutoInstance(Long clusterId, String clusterName, ViewEntity viewEntity, String viewName, ViewConfig viewConfig, AutoInstanceConfig autoConfig, Collection<String> roles) throws SystemException, ValidationException {
+  private void installAutoInstance(Long clusterId, String clusterName, Service service, ViewEntity viewEntity, String viewName, ViewConfig viewConfig, AutoInstanceConfig autoConfig, Collection<String> roles) throws SystemException, ValidationException {
     LOG.info("Auto creating instance of view " + viewName + " for cluster " + clusterName + ".");
     ViewInstanceEntity viewInstanceEntity = createViewInstanceEntity(viewEntity, viewConfig, autoConfig);
+    updateHiveLLAPSettingsIfRequired(viewInstanceEntity, service);
     viewInstanceEntity.setClusterHandle(clusterId);
     installViewInstance(viewInstanceEntity);
     setViewInstanceRoleAccess(viewInstanceEntity, roles);
@@ -1015,6 +1019,47 @@ public class ViewRegistry {
 
   }
 
+  /**
+   * Checks is service is 'HIVE' and INTERACTIVE_SERVICE(LLAP) is enabled. Then, it sets the view instance
+   * parameter 'use.hive.interactive.mode' for the 'AUTO_INSTANCE_VIEW' to be true.
+   * @param viewInstanceEntity
+   * @param service
+   */
+  private void updateHiveLLAPSettingsIfRequired(ViewInstanceEntity viewInstanceEntity, Service service) {
+    String INTERACTIVE_KEY = "use.hive.interactive.mode";
+    String LLAP_COMPONENT_NAME = "HIVE_SERVER_INTERACTIVE";
+    String viewVersion = viewInstanceEntity.getViewDefinition().getVersion();
+    String viewName = viewInstanceEntity.getViewDefinition().getViewName();
+    if(!viewName.equalsIgnoreCase("HIVE") || viewVersion.equalsIgnoreCase("1.0.0")) {
+      return;
+    }
+
+    try {
+      ServiceComponent component = service.getServiceComponent(LLAP_COMPONENT_NAME);
+      if (component.getServiceComponentHosts().size() == 0) {
+        // The LLAP server is not installed in any of the hosts. Hence, return;
+        return;
+      }
+
+      for (Map.Entry<String, String> property : viewInstanceEntity.getPropertyMap().entrySet()) {
+        if (INTERACTIVE_KEY.equals(property.getKey()) && (!"true".equalsIgnoreCase(property.getValue()))) {
+          ViewInstancePropertyEntity propertyEntity = new ViewInstancePropertyEntity();
+          propertyEntity.setViewInstanceName(viewInstanceEntity.getName());
+          propertyEntity.setViewName(viewInstanceEntity.getViewName());
+          propertyEntity.setName(INTERACTIVE_KEY);
+          propertyEntity.setValue("true");
+          propertyEntity.setViewInstanceEntity(viewInstanceEntity);
+          viewInstanceEntity.getProperties().add(propertyEntity);
+        }
+      }
+
+    } catch (AmbariException e) {
+      LOG.error("Failed to update '{}' parameter for viewName: {}, version: {}. Exception: {}",
+          INTERACTIVE_KEY, viewName, viewVersion, e);
+    }
+
+  }
+
   private String getUrlName(ViewInstanceEntity viewInstanceEntity) {
     return viewInstanceEntity.getViewEntity().getCommonName().toLowerCase() + "_" + viewInstanceEntity.getInstanceName().toLowerCase();
   }
@@ -1920,7 +1965,7 @@ public class ViewRegistry {
         try {
 
           if (checkAutoInstanceConfig(autoInstanceConfig, stackId, service, serviceNames)) {
-            installAutoInstance(clusterId, clusterName, viewEntity, viewName, viewConfig, autoInstanceConfig, roles);
+            installAutoInstance(clusterId, clusterName, cluster.getService(service), viewEntity, viewName, viewConfig, autoInstanceConfig, roles);
           }
         } catch (Exception e) {
           LOG.error("Can't auto create instance of view " + viewName + " for cluster " + clusterName +


[39/41] ambari git commit: AMBARI-20683 Reduce size of persisted configurations in wizards. (atkach)

Posted by ao...@apache.org.
AMBARI-20683 Reduce size of persisted configurations in wizards. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/426e8955
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/426e8955
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/426e8955

Branch: refs/heads/branch-3.0-perf
Commit: 426e895527d6567708316e840bd17a278139484e
Parents: 5f99b1a
Author: Andrii Tkach <at...@apache.org>
Authored: Wed Apr 5 16:25:23 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/controllers/installer.js         | 15 +++--
 .../main/admin/kerberos/wizard_controller.js    | 18 +++---
 .../controllers/main/service/add_controller.js  | 43 +++++---------
 ambari-web/app/controllers/wizard.js            | 61 +++++++++++++++-----
 ambari-web/app/routes/add_kerberos_routes.js    | 22 ++++---
 ambari-web/app/routes/add_service_routes.js     | 16 +++--
 ambari-web/app/routes/installer.js              | 25 ++++----
 ambari-web/test/controllers/installer_test.js   | 17 ------
 ambari-web/test/controllers/wizard_test.js      | 47 ++++++++++++++-
 ambari-web/test/init_test.js                    | 25 ++++++++
 10 files changed, 176 insertions(+), 113 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/app/controllers/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/installer.js b/ambari-web/app/controllers/installer.js
index 0946ed8..369e163 100644
--- a/ambari-web/app/controllers/installer.js
+++ b/ambari-web/app/controllers/installer.js
@@ -1022,14 +1022,13 @@ App.InstallerController = App.WizardController.extend(App.Persist, {
         callback: function () {
           var dfd = $.Deferred();
           var self = this;
-          this.loadServiceConfigProperties().always(function() {
-            self.loadServiceConfigGroups();
-            self.loadCurrentHostGroups();
-            self.loadRecommendationsConfigs();
-            self.loadComponentsFromConfigs();
-            self.loadConfigThemes().then(function() {
-              dfd.resolve();
-            });
+          this.loadServiceConfigGroups();
+          this.loadCurrentHostGroups();
+          this.loadRecommendationsConfigs();
+          this.loadComponentsFromConfigs();
+          this.loadConfigThemes().then(function() {
+            self.loadServiceConfigProperties();
+            dfd.resolve();
           });
           return dfd.promise();
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/app/controllers/main/admin/kerberos/wizard_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos/wizard_controller.js b/ambari-web/app/controllers/main/admin/kerberos/wizard_controller.js
index 93ffcaa..64b2065 100644
--- a/ambari-web/app/controllers/main/admin/kerberos/wizard_controller.js
+++ b/ambari-web/app/controllers/main/admin/kerberos/wizard_controller.js
@@ -280,19 +280,15 @@ App.KerberosWizardController = App.WizardController.extend(App.InstallComponent,
     ],
     '2': [
       {
-        type: 'async',
+        type: 'sync',
         callback: function () {
           var self = this;
-          var dfd = $.Deferred();
-          this.loadServiceConfigProperties().always(function() {
-            if (!self.get('stackConfigsLoaded')) {
-              App.config.loadConfigsFromStack(['KERBEROS']).complete(function() {
-                self.set('stackConfigsLoaded', true);
-              }, self);
-            }
-            dfd.resolve();
-          });
-          return dfd.promise();
+          this.loadServiceConfigProperties();
+          if (!self.get('stackConfigsLoaded')) {
+            App.config.loadConfigsFromStack(['KERBEROS']).complete(function() {
+              self.set('stackConfigsLoaded', true);
+            }, self);
+          }
         }
       }
     ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/app/controllers/main/service/add_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/add_controller.js b/ambari-web/app/controllers/main/service/add_controller.js
index dc8f76c..ee7719c 100644
--- a/ambari-web/app/controllers/main/service/add_controller.js
+++ b/ambari-web/app/controllers/main/service/add_controller.js
@@ -126,10 +126,9 @@ App.AddServiceController = App.WizardController.extend(App.AddSecurityConfigs, {
           this.loadKerberosDescriptorConfigs().done(function() {
             self.loadServiceConfigGroups();
             self.loadConfigThemes().then(function() {
-              self.loadServiceConfigProperties().always(function() {
-                self.loadCurrentHostGroups();
-                dfd.resolve();
-              });
+              self.loadServiceConfigProperties();
+              self.loadCurrentHostGroups();
+              dfd.resolve();
             });
           });
           return dfd.promise();
@@ -291,19 +290,14 @@ App.AddServiceController = App.WizardController.extend(App.AddSecurityConfigs, {
   },
 
   loadServiceConfigProperties: function () {
-    var self = this;
-    var dfd = $.Deferred();
-    this._super().always(function() {
-      if (!self.get('content.services')) {
-        self.loadServices();
-      }
-      if (self.get('currentStep') > 1 && self.get('currentStep') < 6) {
-        self.set('content.skipConfigStep', self.skipConfigStep());
-        self.get('isStepDisabled').findProperty('step', 4).set('value', self.get('content.skipConfigStep'));
-      }
-      dfd.resolve();
-    });
-    return dfd.promise();
+    this._super();
+    if (!this.get('content.services')) {
+      this.loadServices();
+    }
+    if (this.get('currentStep') > 1 && this.get('currentStep') < 6) {
+      this.set('content.skipConfigStep', this.skipConfigStep());
+      this.get('isStepDisabled').findProperty('step', 4).set('value', this.get('content.skipConfigStep'));
+    }
   },
 
   /**
@@ -331,16 +325,11 @@ App.AddServiceController = App.WizardController.extend(App.AddSecurityConfigs, {
   },
 
   saveServiceConfigProperties: function (stepController) {
-    var dfd = $.Deferred();
-    var self = this;
-    this._super(stepController).always(function() {
-      if (self.get('currentStep') > 1 && self.get('currentStep') < 6) {
-        self.set('content.skipConfigStep', self.skipConfigStep());
-        self.get('isStepDisabled').findProperty('step', 4).set('value', self.get('content.skipConfigStep'));
-      }
-      dfd.resolve();
-    });
-    return dfd.promise();
+    this._super(stepController);
+    if (this.get('currentStep') > 1 && this.get('currentStep') < 6) {
+      this.set('content.skipConfigStep', this.skipConfigStep());
+      this.get('isStepDisabled').findProperty('step', 4).set('value', this.get('content.skipConfigStep'));
+    }
   },
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/app/controllers/wizard.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard.js b/ambari-web/app/controllers/wizard.js
index 49bdac6..c3a54cf 100644
--- a/ambari-web/app/controllers/wizard.js
+++ b/ambari-web/app/controllers/wizard.js
@@ -21,7 +21,7 @@ var App = require('app');
 
 require('models/host');
 
-App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingMixin, App.Persist, {
+App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingMixin, {
 
   isStepDisabled: null,
 
@@ -908,26 +908,45 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM
   },
 
   /**
-   * Load serviceConfigProperties from persist
-   * @return {$.Deferred}
+   * Load serviceConfigProperties from localStorage
    */
   loadServiceConfigProperties: function () {
-    var dfd = $.Deferred();
-    var self = this;
-    this.getDecompressedData('serviceConfigProperties').always(function(data) {
-      if (data && !data.error) {
-        self.set('content.serviceConfigProperties', data);
-      }
-      dfd.resolve();
-    });
-    return dfd.promise();
+    var stackConfigs = App.configsCollection.getAll();
+    var serviceConfigProperties = this.getDBProperty('serviceConfigProperties');
+    this.set('content.serviceConfigProperties', this.applyStoredConfigs(stackConfigs, serviceConfigProperties));
+  },
+
+  /**
+   *
+   * @param {array} configs
+   * @param {?array} storedConfigs
+   * @returns {?array}
+   */
+  applyStoredConfigs: function(configs, storedConfigs) {
+    if (storedConfigs && storedConfigs.length) {
+      let result = [];
+      let configsMap = configs.toMapByProperty('id');
+      storedConfigs.forEach(function(stored) {
+        var config = configsMap[stored.id];
+        if (config) {
+          result.push(Object.assign({}, config, stored, {savedValue: null}));
+        } else if (stored.isUserProperty) {
+          result.push(Object.assign({}, stored));
+        }
+      });
+      return result;
+    }
+    return storedConfigs;
   },
+
   /**
    * Save config properties
    * @param stepController Step7WizardController
    */
   saveServiceConfigProperties: function (stepController) {
     var serviceConfigProperties = [];
+    // properties in db should contain only mutable info to avoid localStorage overflow
+    var dbConfigProperties = [];
     var fileNamesToUpdate = this.getDBProperty('fileNamesToUpdate') || [];
     var installedServiceNames = stepController.get('installedServiceNames') || [];
     var installedServiceNamesMap = installedServiceNames.toWickMap();
@@ -948,9 +967,19 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM
         );
         configProperty = App.config.mergeStaticProperties(configProperty, _configProperties, [], ['name', 'filename', 'isUserProperty', 'value']);
 
+        var dbConfigProperty = {
+          id: _configProperties.get('id'),
+          value: _configProperties.get('value'),
+          isFinal: _configProperties.get('isFinal')
+        };
+        if (_configProperties.get('isUserProperty') || _configProperties.get('filename') === 'capacity-scheduler.xml') {
+          dbConfigProperty = configProperty;
+        }
         if (this.isExcludedConfig(configProperty)) {
           configProperty.value = '';
+          dbConfigProperty.value = '';
         }
+        dbConfigProperties.push(dbConfigProperty);
         serviceConfigProperties.push(configProperty);
       }, this);
       // check for configs that need to update for installed services
@@ -969,8 +998,10 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM
       }
     }, this);
     this.set('content.serviceConfigProperties', serviceConfigProperties);
-    this.setDBProperty('fileNamesToUpdate', fileNamesToUpdate);
-    return this.postCompressedData('serviceConfigProperties', serviceConfigProperties);
+    this.setDBProperties({
+      fileNamesToUpdate: fileNamesToUpdate,
+      serviceConfigProperties: dbConfigProperties
+    });
   },
 
   isExcludedConfig: function (configProperty) {
@@ -1435,7 +1466,7 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM
 
   clearServiceConfigProperties: function() {
     this.get('content.serviceConfigProperties', null);
-    return this.postCompressedData('serviceConfigProperties', '');
+    return this.setDBProperty('serviceConfigProperties', null);
   },
 
   saveTasksStatuses: function (tasksStatuses) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/app/routes/add_kerberos_routes.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/add_kerberos_routes.js b/ambari-web/app/routes/add_kerberos_routes.js
index 3dbf050..50d5595 100644
--- a/ambari-web/app/routes/add_kerberos_routes.js
+++ b/ambari-web/app/routes/add_kerberos_routes.js
@@ -134,9 +134,8 @@ module.exports = App.WizardRoute.extend({
       var kerberosStep1controller = router.get('kerberosWizardStep1Controller');
 
       kerberosWizardController.saveKerberosOption(kerberosStep1controller);
-      kerberosWizardController.clearServiceConfigProperties().always(function() {
-        router.transitionTo('step2');
-      });
+      kerberosWizardController.clearServiceConfigProperties();
+      router.transitionTo('step2');
     }
   }),
 
@@ -174,15 +173,14 @@ module.exports = App.WizardRoute.extend({
         kerberosWizardStep2Controller.get('stepConfigs')[0].get('configs').findProperty('name', 'manage_krb5_conf').set('value', 'false');
       }
 
-      kerberosWizardController.saveServiceConfigProperties(kerberosWizardStep2Controller, true).always(function() {
-        kerberosWizardController.clearTasksData();
-        if (kerberosWizardController.get('skipClientInstall')) {
-          kerberosWizardController.setDBProperty('kerberosDescriptorConfigs', null);
-          router.transitionTo('step4');
-        } else {
-          router.transitionTo('step3');
-        }
-      });
+      kerberosWizardController.saveServiceConfigProperties(kerberosWizardStep2Controller, true);
+      kerberosWizardController.clearTasksData();
+      if (kerberosWizardController.get('skipClientInstall')) {
+        kerberosWizardController.setDBProperty('kerberosDescriptorConfigs', null);
+        router.transitionTo('step4');
+      } else {
+        router.transitionTo('step3');
+      }
     }
   }),
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/app/routes/add_service_routes.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/add_service_routes.js b/ambari-web/app/routes/add_service_routes.js
index 773042b..89a4a36 100644
--- a/ambari-web/app/routes/add_service_routes.js
+++ b/ambari-web/app/routes/add_service_routes.js
@@ -231,12 +231,11 @@ module.exports = App.WizardRoute.extend({
           router.get('wizardStep7Controller').clearAllRecommendations();
           addServiceController.setDBProperty('serviceConfigGroups', undefined);
           App.ServiceConfigGroup.find().clear();
-          addServiceController.clearServiceConfigProperties().always(function() {
-            if (App.get('isKerberosEnabled')) {
-              addServiceController.setDBProperty('kerberosDescriptorConfigs', null);
-            }
-            router.transitionTo('step4');
-          });
+          addServiceController.clearServiceConfigProperties();
+          if (App.get('isKerberosEnabled')) {
+            addServiceController.setDBProperty('kerberosDescriptorConfigs', null);
+          }
+          router.transitionTo('step4');
         });
       });
     }
@@ -290,9 +289,8 @@ module.exports = App.WizardRoute.extend({
           }
         }
         addServiceController.saveServiceConfigGroups(wizardStep7Controller, true);
-        addServiceController.saveServiceConfigProperties(wizardStep7Controller).always(function() {
-          router.transitionTo('step5');
-        });
+        addServiceController.saveServiceConfigProperties(wizardStep7Controller);
+        router.transitionTo('step5');
       });
     }
   }),

http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/app/routes/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/installer.js b/ambari-web/app/routes/installer.js
index 1048e8e..daefa48 100644
--- a/ambari-web/app/routes/installer.js
+++ b/ambari-web/app/routes/installer.js
@@ -379,10 +379,9 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
               recommendationsConfigs: null,
               componentsFromConfigs: []
             });
-            controller.clearServiceConfigProperties().then(function() {
-              router.transitionTo('step7');
-              console.timeEnd('step6 next');
-            });
+            controller.clearServiceConfigProperties();
+            router.transitionTo('step7');
+            console.timeEnd('step6 next');
           }
         });
       }
@@ -435,15 +434,17 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
         App.set('router.nextBtnClickInProgress', true);
         var controller = router.get('installerController');
         var wizardStep7Controller = router.get('wizardStep7Controller');
-        controller.saveServiceConfigProperties(wizardStep7Controller).always(function() {
-          controller.saveServiceConfigGroups(wizardStep7Controller);
-          controller.setDBProperty('recommendationsConfigs', wizardStep7Controller.get('recommendationsConfigs'));
-          controller.saveComponentsFromConfigs(controller.get('content.componentsFromConfigs'));
-          controller.setDBProperty('recommendationsHostGroup', wizardStep7Controller.get('content.recommendationsHostGroup'));
-          controller.setDBProperty('masterComponentHosts', wizardStep7Controller.get('content.masterComponentHosts'));
-          router.transitionTo('step8');
-          console.timeEnd('step7 next');
+        controller.saveServiceConfigProperties(wizardStep7Controller);
+        controller.saveServiceConfigGroups(wizardStep7Controller);
+        controller.setDBProperty('recommendationsConfigs', wizardStep7Controller.get('recommendationsConfigs'));
+        controller.saveComponentsFromConfigs(controller.get('content.componentsFromConfigs'));
+        controller.setDBProperty('recommendationsHostGroup', wizardStep7Controller.get('content.recommendationsHostGroup'));
+        controller.setDBProperty('masterComponentHosts', wizardStep7Controller.get('content.masterComponentHosts'));
+        App.clusterStatus.setClusterStatus({
+          localdb: App.db.data
         });
+        router.transitionTo('step8');
+        console.timeEnd('step7 next');
       }
     }
   }),

http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/test/controllers/installer_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/installer_test.js b/ambari-web/test/controllers/installer_test.js
index d936ffc..94af88e 100644
--- a/ambari-web/test/controllers/installer_test.js
+++ b/ambari-web/test/controllers/installer_test.js
@@ -785,23 +785,6 @@ describe('App.InstallerController', function () {
     });
   });
 
-  describe('#loadServiceConfigProperties', function() {
-    beforeEach(function () {
-      sinon.stub(installerController, 'getDecompressedData').returns($.Deferred().resolve({
-        value: 2
-      }).promise());
-    });
-    afterEach(function () {
-      installerController.getDecompressedData.restore();
-    });
-    it ('Should load service config property', function() {
-      installerController.loadServiceConfigProperties();
-      expect(installerController.get('content.serviceConfigProperties')).to.eql({
-        "value": 2
-      });
-    });
-  });
-
   describe('#saveServices', function() {
     it ('Should return correct names', function() {
       var stepController = Em.A([

http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/test/controllers/wizard_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/wizard_test.js b/ambari-web/test/controllers/wizard_test.js
index 1a00f24..078f8ae 100644
--- a/ambari-web/test/controllers/wizard_test.js
+++ b/ambari-web/test/controllers/wizard_test.js
@@ -1055,7 +1055,6 @@ describe('App.WizardController', function () {
       sinon.stub(c, 'setDBProperty', Em.K);
       sinon.stub(c, 'setDBProperties', Em.K);
       sinon.stub(c, 'getDBProperty').withArgs('fileNamesToUpdate').returns([]);
-      sinon.stub(c, 'postCompressedData', Em.K);
       sinon.stub(App.config, 'shouldSupportFinal').returns(true);
     });
 
@@ -1063,7 +1062,6 @@ describe('App.WizardController', function () {
       c.setDBProperty.restore();
       c.setDBProperties.restore();
       c.getDBProperty.restore();
-      c.postCompressedData.restore();
       App.config.shouldSupportFinal.restore();
     });
 
@@ -1743,4 +1741,49 @@ describe('App.WizardController', function () {
     });
   });
 
+  describe('#applyStoredConfigs', function() {
+
+    it('should return null when storedConfigs null', function() {
+      expect(c.applyStoredConfigs([], null)).to.be.null;
+    });
+
+    it('should merged configs when storedConfigs has items', function() {
+      var storedConfigs = [
+        {
+          id: 1,
+          value: 'foo',
+          isFinal: false
+        },
+        {
+          id: 2,
+          value: 'foo2',
+          isFinal: true,
+          isUserProperty: true
+        }
+      ];
+      var configs = [
+        {
+          id: 1,
+          value: '',
+          isFinal: true
+        }
+      ];
+      expect(c.applyStoredConfigs(configs, storedConfigs)).to.be.eql([
+        {
+          id: 1,
+          value: 'foo',
+          isFinal: false,
+          savedValue: null
+        },
+        {
+          id: 2,
+          value: 'foo2',
+          isFinal: true,
+          isUserProperty: true
+        }
+      ]);
+    });
+  });
+
+
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/426e8955/ambari-web/test/init_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/init_test.js b/ambari-web/test/init_test.js
index 02188ff..db72744 100644
--- a/ambari-web/test/init_test.js
+++ b/ambari-web/test/init_test.js
@@ -91,6 +91,31 @@ if (!Array.prototype.includes) {
   });
 }
 
+if (typeof Object.assign != 'function') {
+  Object.assign = function(target, varArgs) { // .length of function is 2
+    'use strict';
+    if (target == null) { // TypeError if undefined or null
+      throw new TypeError('Cannot convert undefined or null to object');
+    }
+
+    var to = Object(target);
+
+    for (var index = 1; index < arguments.length; index++) {
+      var nextSource = arguments[index];
+
+      if (nextSource != null) { // Skip over if undefined or null
+        for (var nextKey in nextSource) {
+          // Avoid bugs when hasOwnProperty is shadowed
+          if (Object.prototype.hasOwnProperty.call(nextSource, nextKey)) {
+            to[nextKey] = nextSource[nextKey];
+          }
+        }
+      }
+    }
+    return to;
+  };
+}
+
 Number.isFinite = Number.isFinite || function(value) {
   return typeof value === 'number' && isFinite(value);
 };


[33/41] ambari git commit: AMBARI-20672 - Cluster Merge At End Of Upgrade Creation Cascades Unnecessarily (jonathanhurley)

Posted by ao...@apache.org.
AMBARI-20672 - Cluster Merge At End Of Upgrade Creation Cascades Unnecessarily (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/64447e52
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/64447e52
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/64447e52

Branch: refs/heads/branch-3.0-perf
Commit: 64447e524b077e11cce8dd4b76e6f7dffd047f09
Parents: 45d8baf
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Apr 4 12:57:33 2017 -0400
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../internal/UpgradeResourceProvider.java       | 86 ++++++++++----------
 .../alerts/AlertStateChangedListener.java       |  2 +-
 .../listeners/upgrade/StackVersionListener.java |  2 +-
 .../ambari/server/orm/dao/UpgradeDAO.java       |  5 --
 .../server/orm/entities/ClusterEntity.java      | 13 ++-
 .../server/orm/entities/UpgradeEntity.java      | 27 ++++--
 .../org/apache/ambari/server/state/Cluster.java | 30 +++----
 .../server/state/cluster/ClusterImpl.java       | 48 ++---------
 .../services/RetryUpgradeActionService.java     |  2 +-
 .../internal/UpgradeResourceProviderTest.java   |  4 +-
 .../UpgradeSummaryResourceProviderTest.java     | 16 ++--
 .../upgrade/StackVersionListenerTest.java       |  2 +-
 .../ambari/server/orm/dao/UpgradeDAOTest.java   | 21 +++--
 .../upgrades/UpgradeActionTest.java             |  2 +-
 .../server/state/ServiceComponentTest.java      | 15 +++-
 .../alerts/AlertStateChangedEventTest.java      |  6 +-
 .../services/RetryUpgradeActionServiceTest.java |  2 +-
 17 files changed, 141 insertions(+), 142 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index ca9ce07..709ca93 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -335,7 +335,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     final Cluster cluster;
 
     try {
-      cluster = getManagementController().getClusters().getCluster(clusterName);
+      cluster = clusters.get().getCluster(clusterName);
     } catch (AmbariException e) {
       throw new NoSuchParentResourceException(
           String.format("Cluster %s could not be loaded", clusterName));
@@ -421,7 +421,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
 
       Cluster cluster;
       try {
-        cluster = getManagementController().getClusters().getCluster(clusterName);
+        cluster = clusters.get().getCluster(clusterName);
       } catch (AmbariException e) {
         throw new NoSuchResourceException(
             String.format("Cluster %s could not be loaded", clusterName));
@@ -484,7 +484,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     final Cluster cluster;
 
     try {
-      cluster = getManagementController().getClusters().getCluster(clusterName);
+      cluster = clusters.get().getCluster(clusterName);
     } catch (AmbariException e) {
       throw new NoSuchParentResourceException(
           String.format("Cluster %s could not be loaded", clusterName));
@@ -496,14 +496,11 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
           "manage upgrade and downgrade");
     }
 
-
-
     String requestIdProperty = (String) propertyMap.get(UPGRADE_REQUEST_ID);
     if (null == requestIdProperty) {
       throw new IllegalArgumentException(String.format("%s is required", UPGRADE_REQUEST_ID));
     }
 
-    long clusterId = cluster.getClusterId();
     long requestId = Long.parseLong(requestIdProperty);
     UpgradeEntity upgradeEntity = s_upgradeDAO.findUpgradeByRequestId(requestId);
     if( null == upgradeEntity){
@@ -544,11 +541,11 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         suspended = Boolean.valueOf((String) propertyMap.get(UPGRADE_SUSPENDED));
       }
 
-      setUpgradeRequestStatus(clusterId, requestId, status, propertyMap);
-
-      // When the status of the upgrade's request is changing, we also update the suspended flag.
-      upgradeEntity.setSuspended(suspended);
-      s_upgradeDAO.merge(upgradeEntity);
+      try {
+        setUpgradeRequestStatus(cluster, requestId, status, suspended, propertyMap);
+      } catch (AmbariException ambariException) {
+        throw new SystemException(ambariException.getMessage(), ambariException);
+      }
     }
 
     // if either of the skip failure settings are in the request, then we need
@@ -948,11 +945,12 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
       RequestStageContainer request,
       UpgradeEntity upgradeEntity) throws AmbariException {
 
-    upgradeEntity.setRequestId(request.getId());
-
     request.persist();
+    RequestEntity requestEntity = s_requestDAO.findByPK(request.getId());
 
+    upgradeEntity.setRequestEntity(requestEntity);
     s_upgradeDAO.create(upgradeEntity);
+
     cluster.setUpgradeEntity(upgradeEntity);
 
     return upgradeEntity;
@@ -1670,19 +1668,28 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
    * <li>{@link HostRoleStatus#ABORTED}</li>
    * <li>{@link HostRoleStatus#PENDING}</li>
    * </ul>
+   * This method will also adjust the cluster->upgrade association correctly
+   * based on the new status being supplied.
    *
-   * @param clusterId
-   *          the ID of the cluster
+   * @param cluster
+   *          the cluster
    * @param requestId
    *          the request to change the status for.
    * @param status
    *          the status to set on the associated request.
+   * @param suspended
+   *          if the value of the specified status is
+   *          {@link HostRoleStatus#ABORTED}, then this boolean will control
+   *          whether the upgrade is suspended (still associated with the
+   *          cluster) or aborted (no longer associated with the cluster).
    * @param propertyMap
    *          the map of request properties (needed for things like abort reason
    *          if present)
    */
-  private void setUpgradeRequestStatus(long clusterId, long requestId, HostRoleStatus status,
-      Map<String, Object> propertyMap) {
+  @Transactional
+  void setUpgradeRequestStatus(Cluster cluster, long requestId, HostRoleStatus status,
+      boolean suspended, Map<String, Object> propertyMap) throws AmbariException {
+    // these are the only two states we allow
     if (status != HostRoleStatus.ABORTED && status != HostRoleStatus.PENDING) {
       throw new IllegalArgumentException(String.format("Cannot set status %s, only %s is allowed",
           status, EnumSet.of(HostRoleStatus.ABORTED, HostRoleStatus.PENDING)));
@@ -1710,23 +1717,23 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
 
     ActionManager actionManager = getManagementController().getActionManager();
 
-    if (HostRoleStatus.ABORTED == status) {
-      if (!internalStatus.isCompletedState()) {
-        actionManager.cancelRequest(requestId, reason);
-        // Remove relevant upgrade entity
-        try {
-          Cluster cluster = clusters.get().getClusterById(clusterId);
-          UpgradeEntity upgradeEntity = s_upgradeDAO.findUpgradeByRequestId(requestId);
-          upgradeEntity.setSuspended(true);
-          s_upgradeDAO.merge(upgradeEntity);
-
-          cluster.setUpgradeEntity(null);
-        } catch (AmbariException e) {
-          LOG.warn("Could not clear upgrade entity for cluster with id {}", clusterId, e);
-        }
+    if (HostRoleStatus.ABORTED == status && !internalStatus.isCompletedState()) {
+      // cancel the request
+      actionManager.cancelRequest(requestId, reason);
+
+      // either suspend the upgrade or abort it outright
+      UpgradeEntity upgradeEntity = s_upgradeDAO.findUpgradeByRequestId(requestId);
+      if (suspended) {
+        // set the upgrade to suspended
+        upgradeEntity.setSuspended(suspended);
+        s_upgradeDAO.merge(upgradeEntity);
+      } else {
+        // otherwise remove the association with the cluster since it's being
+        // fully aborted
+        cluster.setUpgradeEntity(null);
       }
-    } else {
-      // Status must be PENDING.
+
+    } else if (status == HostRoleStatus.PENDING) {
       List<Long> taskIds = new ArrayList<>();
       List<HostRoleCommandEntity> hrcEntities = s_hostRoleCommandDAO.findByRequestIdAndStatuses(
           requestId, Sets.newHashSet(HostRoleStatus.ABORTED, HostRoleStatus.TIMEDOUT));
@@ -1737,16 +1744,9 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
 
       actionManager.resubmitTasks(taskIds);
 
-      try {
-        Cluster cluster = clusters.get().getClusterById(clusterId);
-        UpgradeEntity lastUpgradeItemForCluster = s_upgradeDAO.findLastUpgradeOrDowngradeForCluster(cluster.getClusterId());
-        lastUpgradeItemForCluster.setSuspended(false);
-        s_upgradeDAO.merge(lastUpgradeItemForCluster);
-
-        cluster.setUpgradeEntity(lastUpgradeItemForCluster);
-      } catch (AmbariException e) {
-        LOG.warn("Could not clear upgrade entity for cluster with id {}", clusterId, e);
-      }
+      UpgradeEntity lastUpgradeItemForCluster = s_upgradeDAO.findLastUpgradeOrDowngradeForCluster(cluster.getClusterId());
+      lastUpgradeItemForCluster.setSuspended(false);
+      s_upgradeDAO.merge(lastUpgradeItemForCluster);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java
index ce55203..a02a5d8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertStateChangedListener.java
@@ -227,7 +227,7 @@ public class AlertStateChangedListener {
     Long clusterId = history.getClusterId();
     try {
       Cluster cluster = m_clusters.get().getClusterById(clusterId);
-      if (null != cluster.getUpgradeEntity() || cluster.isUpgradeSuspended()) {
+      if (null != cluster.getUpgradeInProgress()) {
         // only send AMBARI alerts if in an upgrade
         String serviceName = definition.getServiceName();
         if (!StringUtils.equals(serviceName, Services.AMBARI.name())) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
index bd7eb00..22d7f2e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
@@ -223,7 +223,7 @@ public class StackVersionListener {
         sch.setUpgradeState(UpgradeState.VERSION_MISMATCH);
       }
     } else if (upgradeState == UpgradeState.VERSION_MISMATCH && desiredVersion.equals(newVersion)) {
-      if (cluster.getUpgradeEntity() != null) {
+      if (cluster.getUpgradeInProgress() != null) {
         sch.setUpgradeState(UpgradeState.COMPLETE);
       } else {
         sch.setUpgradeState(UpgradeState.NONE);

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
index bf9c650..4666edf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
@@ -105,11 +105,6 @@ public class UpgradeDAO {
   @Transactional
   public void create(UpgradeEntity entity) {
     EntityManager entityManager = entityManagerProvider.get();
-    // This is required because since none of the entities
-    // for the request are actually persisted yet,
-    // JPA ordering could allow foreign key entities
-    // to be created after this statement.
-    entityManager.flush();
     entityManager.persist(entity);
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
index 89b0646..527fd7a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
@@ -142,8 +142,17 @@ public class ClusterEntity {
   })
   private ResourceEntity resource;
 
-  @OneToOne(cascade = CascadeType.ALL)
-  @JoinColumn(name = "upgrade_id", referencedColumnName = "upgrade_id")
+  @Basic
+  @Column(name = "upgrade_id", nullable = true, insertable = false, updatable = false)
+  private Long upgradeId;
+
+  @OneToOne(cascade = CascadeType.REMOVE)
+  @JoinColumn(
+      name = "upgrade_id",
+      referencedColumnName = "upgrade_id",
+      nullable = true,
+      insertable = false,
+      updatable = true)
   /**
    * {@code null} when there is no upgrade/downgrade in progress.
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
index 7421ca1..89574bc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
@@ -24,12 +24,15 @@ import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.EnumType;
 import javax.persistence.Enumerated;
+import javax.persistence.FetchType;
 import javax.persistence.GeneratedValue;
 import javax.persistence.GenerationType;
 import javax.persistence.Id;
+import javax.persistence.JoinColumn;
 import javax.persistence.NamedQueries;
 import javax.persistence.NamedQuery;
 import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
 import javax.persistence.Table;
 import javax.persistence.TableGenerator;
 
@@ -42,8 +45,11 @@ import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
  */
 @Entity
 @Table(name = "upgrade")
-@TableGenerator(name = "upgrade_id_generator",
-    table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value",
+@TableGenerator(
+    name = "upgrade_id_generator",
+    table = "ambari_sequences",
+    pkColumnName = "sequence_name",
+    valueColumnName = "sequence_value",
     pkColumnValue = "upgrade_id_seq",
     initialValue = 0)
 @NamedQueries({
@@ -68,9 +74,19 @@ public class UpgradeEntity {
   @Column(name = "cluster_id", nullable = false, insertable = true, updatable = false)
   private Long clusterId;
 
-  @Column(name="request_id", nullable = false)
+  @Column(name = "request_id", nullable = false, insertable = false, updatable = false)
   private Long requestId;
 
+  /**
+   * The request entity associated with this upgrade. This relationship allows
+   * JPA to correctly order non-flushed commits during the transaction which
+   * creates the upgrade. Without it, JPA would not know the correct order and
+   * may try to create the upgrade before the request.
+   */
+  @OneToOne(optional = false, fetch = FetchType.LAZY)
+  @JoinColumn(name = "request_id", nullable = false, insertable = true, updatable = false)
+  private RequestEntity requestEntity = null;
+
   @Column(name="from_version", nullable = false)
   private String fromVersion = null;
 
@@ -161,8 +177,9 @@ public class UpgradeEntity {
   /**
    * @param id the request id
    */
-  public void setRequestId(Long id) {
-    requestId = id;
+  public void setRequestEntity(RequestEntity requestEntity) {
+    this.requestEntity = requestEntity;
+    requestId = requestEntity.getRequestId();
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index 8074b31..9594803 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -672,15 +672,21 @@ public interface Cluster {
   boolean isBluePrintDeployed();
 
   /**
-   * @return upgrade that is in progress for a cluster. If no upgrade is going
-   * on, a null is returned.
+   * Gets an {@link UpgradeEntity} if there is an upgrade in progress or an
+   * upgrade that has been suspended. This will return the associated
+   * {@link UpgradeEntity} if it exists.
+   * 
+   * @return an upgrade which will either be in progress or suspended, or
+   *         {@code null} if none.
+   * 
    */
-  UpgradeEntity getUpgradeEntity();
+  UpgradeEntity getUpgradeInProgress();
 
   /**
-   * The value is explicitly set on the ClusterEntity when Creating,
-   * Aborting (switching to downgrade), Resuming, or Finalizing an upgrade.
-   * @param upgradeEntity the upgrade entity to set for cluster
+   * Sets or clears the associated upgrade with the cluster.
+   *
+   * @param upgradeEntity
+   *          the upgrade entity to set for cluster, or {@code null} for none.
    * @throws AmbariException
    */
   void setUpgradeEntity(UpgradeEntity upgradeEntity) throws AmbariException;
@@ -695,18 +701,6 @@ public interface Cluster {
   boolean isUpgradeSuspended();
 
   /**
-   * Gets an {@link UpgradeEntity} if there is an upgrade in progress or an
-   * upgrade that has been suspended. This will first check
-   * {@link #getUpgradeEntity()} and return that if it is not {@code null}.
-   * Otherwise, this will perform a search for the most recent upgrade/downgrade
-   * which has not been completed.
-   *
-   * @return an upgrade which will either be in progress or suspended, or
-   *         {@code null} if none.
-   */
-  UpgradeEntity getUpgradeInProgress();
-
-  /**
    * Returns the name of the service that the passed config type belongs to.
    *
    * @param configType

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 739fe23..b7cc4cd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -1020,39 +1020,6 @@ public class ClusterImpl implements Cluster {
    * {@inheritDoc}
    */
   @Override
-  public UpgradeEntity getUpgradeInProgress() {
-    // first check for an upgrade that's actively running
-    UpgradeEntity upgradeInProgress = getUpgradeEntity();
-    if (null != upgradeInProgress) {
-      return upgradeInProgress;
-    }
-
-    // perform a search for any upgrade which should also return upgrades which
-    // are suspended
-    UpgradeEntity mostRecentUpgrade = upgradeDAO.findLastUpgradeOrDowngradeForCluster(getClusterId());
-    if (mostRecentUpgrade != null) {
-      if (mostRecentUpgrade.isSuspended()) {
-        return mostRecentUpgrade;
-      }
-
-      // look for any item from the prior upgrade which is still in progress
-      // (not failed, completed, or aborted)
-      List<HostRoleCommandEntity> commands = hostRoleCommandDAO.findByRequestIdAndStatuses(
-          mostRecentUpgrade.getRequestId(), HostRoleStatus.IN_PROGRESS_STATUSES);
-
-      if (!commands.isEmpty()) {
-        return mostRecentUpgrade;
-      }
-    }
-
-    return null;
-  }
-
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
   public ClusterVersionEntity getEffectiveClusterVersion() throws AmbariException {
     UpgradeEntity upgradeEntity = getUpgradeInProgress();
     if (upgradeEntity == null) {
@@ -3334,7 +3301,7 @@ public class ClusterImpl implements Cluster {
    * {@inheritDoc}
    */
   @Override
-  public UpgradeEntity getUpgradeEntity() {
+  public UpgradeEntity getUpgradeInProgress() {
     ClusterEntity clusterEntity = getClusterEntity();
     return clusterEntity.getUpgradeEntity();
   }
@@ -3350,10 +3317,7 @@ public class ClusterImpl implements Cluster {
       clusterEntity.setUpgradeEntity(upgradeEntity);
       clusterDAO.merge(clusterEntity);
     } catch (RollbackException e) {
-      String msg = "Unable to set upgrade entiry " + upgradeEntity + " for cluster "
-        + getClusterName();
-      LOG.warn(msg);
-      throw new AmbariException(msg, e);
+      throw new AmbariException("Unable to update the associated upgrade with the cluster", e);
     }
   }
 
@@ -3362,11 +3326,9 @@ public class ClusterImpl implements Cluster {
    */
   @Override
   public boolean isUpgradeSuspended() {
-    UpgradeEntity lastUpgradeItemForCluster = upgradeDAO.findLastUpgradeForCluster(clusterId,
-        Direction.UPGRADE);
-
-    if (null != lastUpgradeItemForCluster) {
-      return lastUpgradeItemForCluster.isSuspended();
+    UpgradeEntity upgrade = getUpgradeInProgress();
+    if (null != upgrade) {
+      return upgrade.isSuspended();
     }
 
     return false;

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java b/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java
index 6d960c3..584ce98 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java
@@ -173,7 +173,7 @@ public class RetryUpgradeActionService extends AbstractScheduledService {
     }
 
     // May be null, and either upgrade or downgrade
-    UpgradeEntity currentUpgrade = cluster.getUpgradeEntity();
+    UpgradeEntity currentUpgrade = cluster.getUpgradeInProgress();
     if (currentUpgrade == null) {
       LOG.debug("There is no active stack upgrade in progress. Skip retrying failed tasks.");
       return null;

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 3e70989..999b7a7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -618,7 +618,7 @@ public class UpgradeResourceProviderTest {
     upgradeEntity.setToVersion("2.2.2.2");
     upgradeEntity.setUpgradePackage("upgrade_test");
     upgradeEntity.setUpgradeType(UpgradeType.ROLLING);
-    upgradeEntity.setRequestId(2L);
+    upgradeEntity.setRequestEntity(requestEntity);
 
     upgradeDao.create(upgradeEntity);
     upgrades = upgradeDao.findUpgrades(cluster.getClusterId());
@@ -1608,7 +1608,7 @@ public class UpgradeResourceProviderTest {
     Request request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
     upgradeResourceProvider.createResources(request);
 
-    List<StageEntity> stages = stageDao.findByRequestId(cluster.getUpgradeEntity().getRequestId());
+    List<StageEntity> stages = stageDao.findByRequestId(cluster.getUpgradeInProgress().getRequestId());
     Assert.assertEquals(3, stages.size());
 
     long expectedStageId = 1L;

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java
index 19dbb44..bee8983 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java
@@ -202,10 +202,7 @@ public class UpgradeSummaryResourceProviderTest {
     ServiceComponentHostEvent event = new ServiceComponentHostOpInProgressEvent("ZOOKEEPER_SERVER", "h1", 1L);
     ServiceComponentHostEventWrapper eventWrapper = new ServiceComponentHostEventWrapper(event);
 
-    RequestEntity requestEntity = new RequestEntity();
-    requestEntity.setRequestId(upgradeRequestId);
-    requestEntity.setClusterId(cluster.getClusterId());
-    requestDAO.create(requestEntity);
+    RequestEntity requestEntity = requestDAO.findByPK(upgradeRequestId);
 
     // Create the stage and add it to the request
     StageEntity stageEntity = new StageEntity();
@@ -269,8 +266,15 @@ public class UpgradeSummaryResourceProviderTest {
     Set<Resource> resources = upgradeSummaryResourceProvider.getResources(requestResource, p1And2);
     assertEquals(0, resources.size());
 
+    RequestEntity requestEntity = new RequestEntity();
+    requestEntity.setRequestId(1L);
+    requestEntity.setClusterId(cluster.getClusterId());
+    requestEntity.setStatus(HostRoleStatus.PENDING);
+    requestEntity.setStages(new ArrayList<StageEntity>());
+    requestDAO.create(requestEntity);
+
     UpgradeEntity upgrade = new UpgradeEntity();
-    upgrade.setRequestId(upgradeRequestId);
+    upgrade.setRequestEntity(requestEntity);
     upgrade.setClusterId(cluster.getClusterId());
     upgrade.setId(1L);
     upgrade.setUpgradePackage("some-name");
@@ -298,7 +302,7 @@ public class UpgradeSummaryResourceProviderTest {
     Assert.assertNull(r.getPropertyValue(UpgradeSummaryResourceProvider.UPGRADE_SUMMARY_FAIL_REASON));
 
     // Case 4: Append a failed task to the Upgrade. Resource should have a failed reason.
-    RequestEntity requestEntity = requestDAO.findByPK(upgradeRequestId);
+    requestEntity = requestDAO.findByPK(upgradeRequestId);
     HostEntity h1 = hostDAO.findByName("h1");
 
     StageEntity nextStage = new StageEntity();

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
index bd9a340..7b8b68a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
@@ -248,7 +248,7 @@ public class StackVersionListenerTest extends EasyMockSupport {
   public void testSetUpgradeStateToCompleteWhenHostHasVersionMismatchAndNewVersionIsEqualToComponentDesiredVersionAndClusterUpgradeIsInProgress() {
     expect(sch.getVersion()).andReturn(VALID_PREVIOUS_VERSION);
     expect(sch.getUpgradeState()).andReturn(UpgradeState.VERSION_MISMATCH);
-    expect(cluster.getUpgradeEntity()).andReturn(DUMMY_UPGRADE_ENTITY);
+    expect(cluster.getUpgradeInProgress()).andReturn(DUMMY_UPGRADE_ENTITY);
     expect(serviceComponent.getDesiredVersion()).andStubReturn(VALID_NEW_VERSION);
     expect(serviceComponent.isVersionAdvertised()).andReturn(Boolean.TRUE);
     sch.setUpgradeState(UpgradeState.COMPLETE);

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
index 636108e..9e47e4d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
@@ -33,7 +33,6 @@ import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.OrmTestHelper;
-import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.RequestEntity;
 import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.orm.entities.UpgradeEntity;
@@ -51,8 +50,7 @@ import com.google.inject.Guice;
 import com.google.inject.Injector;
 
 /**
- * Tests {@link AlertDefinitionDAO} for interacting with
- * {@link AlertDefinitionEntity}.
+ * Tests {@link UpgradeDAO} for interacting with {@link UpgradeEntity}.
  */
 public class UpgradeDAOTest {
 
@@ -78,10 +76,17 @@ public class UpgradeDAOTest {
     helper = injector.getInstance(OrmTestHelper.class);
     clusterId = helper.createCluster();
 
+    RequestEntity requestEntity = new RequestEntity();
+    requestEntity.setRequestId(99L);
+    requestEntity.setClusterId(clusterId.longValue());
+    requestEntity.setStatus(HostRoleStatus.PENDING);
+    requestEntity.setStages(new ArrayList<StageEntity>());
+    requestDAO.create(requestEntity);
+
     // create upgrade entities
     UpgradeEntity entity = new UpgradeEntity();
     entity.setClusterId(clusterId.longValue());
-    entity.setRequestId(Long.valueOf(1));
+    entity.setRequestEntity(requestEntity);
     entity.setFromVersion("");
     entity.setToVersion("");
     entity.setUpgradeType(UpgradeType.ROLLING);
@@ -159,7 +164,7 @@ public class UpgradeDAOTest {
     entity1.setId(11L);
     entity1.setClusterId(clusterId.longValue());
     entity1.setDirection(Direction.UPGRADE);
-    entity1.setRequestId(1L);
+    entity1.setRequestEntity(requestEntity);
     entity1.setFromVersion("2.2.0.0-1234");
     entity1.setToVersion("2.3.0.0-4567");
     entity1.setUpgradeType(UpgradeType.ROLLING);
@@ -170,7 +175,7 @@ public class UpgradeDAOTest {
     entity2.setId(22L);
     entity2.setClusterId(clusterId.longValue());
     entity2.setDirection(Direction.DOWNGRADE);
-    entity2.setRequestId(1L);
+    entity2.setRequestEntity(requestEntity);
     entity2.setFromVersion("2.3.0.0-4567");
     entity2.setToVersion("2.2.0.0-1234");
     entity2.setUpgradeType(UpgradeType.ROLLING);
@@ -181,7 +186,7 @@ public class UpgradeDAOTest {
     entity3.setId(33L);
     entity3.setClusterId(clusterId.longValue());
     entity3.setDirection(Direction.UPGRADE);
-    entity3.setRequestId(1L);
+    entity3.setRequestEntity(requestEntity);
     entity3.setFromVersion("2.2.0.0-1234");
     entity3.setToVersion("2.3.1.1-4567");
     entity3.setUpgradeType(UpgradeType.ROLLING);
@@ -211,7 +216,7 @@ public class UpgradeDAOTest {
     upgradeEntity.setId(11L);
     upgradeEntity.setClusterId(clusterId.longValue());
     upgradeEntity.setDirection(Direction.UPGRADE);
-    upgradeEntity.setRequestId(1L);
+    upgradeEntity.setRequestEntity(requestEntity);
     upgradeEntity.setFromVersion("2.2.0.0-1234");
     upgradeEntity.setToVersion("2.3.0.0-4567");
     upgradeEntity.setUpgradeType(UpgradeType.ROLLING);

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index a17c40d..c6ee875 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -980,7 +980,7 @@ public class UpgradeActionTest {
     UpgradeEntity upgradeEntity = new UpgradeEntity();
     upgradeEntity.setId(1L);
     upgradeEntity.setClusterId(cluster.getClusterId());
-    upgradeEntity.setRequestId(requestEntity.getRequestId());
+    upgradeEntity.setRequestEntity(requestEntity);
     upgradeEntity.setUpgradePackage("");
     upgradeEntity.setFromVersion(sourceRepo);
     upgradeEntity.setToVersion(targetRepo);

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
index 5383113..8e8bed3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
@@ -23,12 +23,14 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.fail;
 
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.H2DatabaseCleaner;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.ServiceComponentResponse;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
@@ -38,15 +40,18 @@ import org.apache.ambari.server.orm.dao.HostComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.HostComponentStateDAO;
 import org.apache.ambari.server.orm.dao.HostDAO;
 import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.dao.RequestDAO;
 import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.UpgradeDAO;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.RequestEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentHistoryEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentVersionEntity;
+import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
@@ -643,6 +648,14 @@ public class ServiceComponentTest {
    * @return
    */
   private UpgradeEntity createUpgradeEntity(String fromVersion, String toVersion) {
+    RequestDAO requestDAO = injector.getInstance(RequestDAO.class);
+    RequestEntity requestEntity = new RequestEntity();
+    requestEntity.setRequestId(99L);
+    requestEntity.setClusterId(cluster.getClusterId());
+    requestEntity.setStatus(HostRoleStatus.PENDING);
+    requestEntity.setStages(new ArrayList<StageEntity>());
+    requestDAO.create(requestEntity);
+
     UpgradeDAO upgradeDao = injector.getInstance(UpgradeDAO.class);
     UpgradeEntity upgradeEntity = new UpgradeEntity();
     upgradeEntity.setClusterId(cluster.getClusterId());
@@ -651,7 +664,7 @@ public class ServiceComponentTest {
     upgradeEntity.setToVersion(toVersion);
     upgradeEntity.setUpgradePackage("upgrade_test");
     upgradeEntity.setUpgradeType(UpgradeType.ROLLING);
-    upgradeEntity.setRequestId(1L);
+    upgradeEntity.setRequestEntity(requestEntity);
 
     upgradeDao.create(upgradeEntity);
     List<UpgradeEntity> upgrades = upgradeDao.findUpgrades(cluster.getClusterId());

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
index 222f2b0..d4dbdc8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
@@ -540,7 +540,7 @@ public class AlertStateChangedEventTest extends EasyMockSupport {
     Cluster cluster = createMock(Cluster.class);
 
     EasyMock.expect(clusters.getClusterById(EasyMock.anyLong())).andReturn(cluster).atLeastOnce();
-    EasyMock.expect(cluster.getUpgradeEntity()).andReturn(null).anyTimes();
+    EasyMock.expect(cluster.getUpgradeInProgress()).andReturn(null).anyTimes();
     EasyMock.expect(cluster.isUpgradeSuspended()).andReturn(false).anyTimes();
   }
 
@@ -556,7 +556,7 @@ public class AlertStateChangedEventTest extends EasyMockSupport {
     EasyMock.reset(clusters);
 
     EasyMock.expect(clusters.getClusterById(EasyMock.anyLong())).andReturn(cluster).atLeastOnce();
-    EasyMock.expect(cluster.getUpgradeEntity()).andReturn(new UpgradeEntity()).anyTimes();
+    EasyMock.expect(cluster.getUpgradeInProgress()).andReturn(new UpgradeEntity()).anyTimes();
     EasyMock.expect(cluster.isUpgradeSuspended()).andReturn(false).anyTimes();
   }
 
@@ -572,7 +572,7 @@ public class AlertStateChangedEventTest extends EasyMockSupport {
     EasyMock.reset(clusters);
 
     EasyMock.expect(clusters.getClusterById(EasyMock.anyLong())).andReturn(cluster).atLeastOnce();
-    EasyMock.expect(cluster.getUpgradeEntity()).andReturn(null).anyTimes();
+    EasyMock.expect(cluster.getUpgradeInProgress()).andReturn(null).anyTimes();
     EasyMock.expect(cluster.isUpgradeSuspended()).andReturn(true).anyTimes();
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/64447e52/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java
index 2c0b507..f52f007 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java
@@ -285,7 +285,7 @@ public class RetryUpgradeActionServiceTest {
 
     UpgradeEntity upgrade = new UpgradeEntity();
     upgrade.setId(1L);
-    upgrade.setRequestId(upgradeRequestId);
+    upgrade.setRequestEntity(requestEntity);
     upgrade.setClusterId(cluster.getClusterId());
     upgrade.setUpgradePackage("some-name");
     upgrade.setUpgradeType(UpgradeType.ROLLING);


[21/41] ambari git commit: AMBARI-20656.Coordinator and bundle should retain job.properties in submission modal window(M Madhan Mohan Reddy via padmapriyanitt)

Posted by ao...@apache.org.
AMBARI-20656.Coordinator and bundle should retain job.properties in submission modal window(M Madhan Mohan Reddy via padmapriyanitt)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/caf6a254
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/caf6a254
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/caf6a254

Branch: refs/heads/branch-3.0-perf
Commit: caf6a254aa9577e4c596d898c930f7535c08dc48
Parents: 13729ed
Author: padmapriyanitt <pa...@gmail.com>
Authored: Tue Apr 4 12:00:04 2017 +0530
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 6 12:32:56 2017 +0300

----------------------------------------------------------------------
 .../src/main/resources/ui/app/components/bundle-config.js          | 1 +
 .../wfmanager/src/main/resources/ui/app/components/coord-config.js | 1 +
 .../main/resources/ui/app/templates/components/bundle-config.hbs   | 2 +-
 .../main/resources/ui/app/templates/components/coord-config.hbs    | 2 +-
 4 files changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/caf6a254/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
index 7c63b47..99f4fa7 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
@@ -46,6 +46,7 @@ export default Ember.Component.extend(Ember.Evented, Validations, {
   propertyExtractor : Ember.inject.service('property-extractor'),
   fileBrowser : Ember.inject.service('file-browser'),
   workspaceManager : Ember.inject.service('workspace-manager'),
+  jobConfigProperties: Ember.A([]),
   initialize : function(){
     var self = this;
     this.set('errors', Ember.A([]));

http://git-wip-us.apache.org/repos/asf/ambari/blob/caf6a254/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
index e8f8cc1..fba4db5 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
@@ -50,6 +50,7 @@ export default Ember.Component.extend(Validations, Ember.Evented, {
   propertyExtractor : Ember.inject.service('property-extractor'),
   workspaceManager : Ember.inject.service('workspace-manager'),
   showErrorMessage: Ember.computed.alias('saveAttempted'),
+  jobConfigProperties: Ember.A([]),
   datasetsForInputs : Ember.computed('coordinator.datasets.[]','coordinator.dataOutputs.[]',function(){
     var datasetsForInputs = Ember.copy(this.get('coordinator.datasets'));
     this.get('coordinator.dataOutputs').forEach((dataOutput)=>{

http://git-wip-us.apache.org/repos/asf/ambari/blob/caf6a254/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
index 39ab06d..b674990 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
@@ -124,7 +124,7 @@
   {{hdfs-browser closeFileBrowser="closeFileBrowser" selectFileCallback=selectFileCallback filePath=filePath}}
 {{/if}}
 {{#if showingJobConfig}}
-  {{job-config type='bundle' closeJobConfigs="closeBundleSubmitConfig" jobFilePath=bundleFilePath openFileBrowser="openFileBrowser" closeFileBrowser="closeFileBrowser" jobConfigs=bundleConfigs containsParameteriedPaths=containsParameteriedPaths}}
+  {{job-config type='bundle' closeJobConfigs="closeBundleSubmitConfig" jobFilePath=bundleFilePath openFileBrowser="openFileBrowser" closeFileBrowser="closeFileBrowser" jobConfigs=bundleConfigs containsParameteriedPaths=containsParameteriedPaths jobConfigProperties=jobConfigProperties}}
 {{/if}}
 {{#if showingResetConfirmation}}
   {{#confirmation-dialog title="Confirm Bundle Reset"

http://git-wip-us.apache.org/repos/asf/ambari/blob/caf6a254/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
index 0e35d0e..3b1b6a9 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
@@ -339,7 +339,7 @@
     parameterizedWorkflowPath=parameterizedWorkflowPath
     extractProperties="extractProperties" containsParameteriedPaths=containsParameteriedPaths
     jobFilePath=coordinatorFilePath openFileBrowser="openFileBrowser"
-    closeFileBrowser="closeFileBrowser" jobConfigs=coordinatorConfigs isDryrun=dryrun}}
+    closeFileBrowser="closeFileBrowser" jobConfigs=coordinatorConfigs isDryrun=dryrun jobConfigProperties=jobConfigProperties}}
 {{/if}}
 {{#if showingResetConfirmation}}
 {{#confirmation-dialog title="Confirm Coordinator Reset"