You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by nc...@apache.org on 2017/02/13 22:13:09 UTC

[01/50] [abbrv] ambari git commit: Updated team page. (yusaku)

Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-12556 d222f572e -> 341cb1247


Updated team page. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d5755ba1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d5755ba1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d5755ba1

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d5755ba15a979aa94fd53338be27d2206f108065
Parents: 6addaf5
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Thu Feb 9 11:53:15 2017 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Feb 9 11:53:15 2017 -0800

----------------------------------------------------------------------
 docs/pom.xml | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d5755ba1/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index a128775..db907d1 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -151,6 +151,18 @@
             </organization>
         </developer>
         <developer>
+            <id>adoroszlai</id>
+            <name>Attila Doroszlai</name>
+            <email>adoroszlai@apache.org</email>
+            <timezone>+1</timezone>
+            <roles>
+                <role>Committer</role>
+            </roles>
+            <organization>
+                Hortonworks
+            </organization>
+        </developer>
+        <developer>
             <id>ajit</id>
             <name>Ajit Kumar</name>
             <email>ajit@apache.org</email>
@@ -740,7 +752,7 @@
                 <role>Committer</role>
             </roles>
             <organization>
-                Teraware
+                ITRenew Inc
             </organization>
         </developer>
         <developer>
@@ -1271,7 +1283,7 @@
         </contributor>
         <contributor>
           <name>Vivek Ratnavel Subramanian</name>
-          <organization>The Ohio State University</organization>
+          <organization>Hortonworks</organization>
         </contributor>
         <contributor>
           <name>Pramod Thangali</name>


[10/50] [abbrv] ambari git commit: AMBARI-19940. Hive View 2.0: Remove aria tags from templates. (dipayanb)

Posted by nc...@apache.org.
AMBARI-19940. Hive View 2.0: Remove aria tags from templates. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/09e35e1e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/09e35e1e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/09e35e1e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 09e35e1e3de0206edbf139ed5cbf9a33b470c7f1
Parents: 2ce1042
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Fri Feb 10 15:06:13 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Fri Feb 10 15:06:46 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/templates/components/alert-message.hbs | 2 +-
 .../main/resources/ui/app/templates/components/column-item.hbs   | 2 +-
 .../resources/ui/app/templates/components/confirm-dialog.hbs     | 2 +-
 .../main/resources/ui/app/templates/components/export-result.hbs | 2 +-
 .../resources/ui/app/templates/components/hdfs-viewer-modal.hbs  | 2 +-
 .../main/resources/ui/app/templates/components/info-dialog.hbs   | 2 +-
 .../resources/ui/app/templates/components/query-result-table.hbs | 4 ++--
 .../hive20/src/main/resources/ui/app/templates/databases.hbs     | 2 +-
 .../ui/app/templates/databases/database/tables/table.hbs         | 4 ++--
 .../hive20/src/main/resources/ui/app/templates/queries/query.hbs | 4 ++--
 .../hive20/src/main/resources/ui/app/templates/savedqueries.hbs  | 4 ++--
 .../hive20/src/main/resources/ui/app/templates/service-check.hbs | 2 +-
 12 files changed, 16 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
index ce8d941..2f114f9 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
@@ -17,7 +17,7 @@
 }}
 
 <div class={{alert-message-context-class flash.type "clearfix alert alert-dismissible alert-"}}>
-  <button type="button" class="close" {{action "closeAlert"}}><span aria-hidden="true">&times;</span></button>
+  <button type="button" class="close" {{action "closeAlert"}}>&times;</button>
   <div class="alert-icon wrap-message">
     {{#fa-stack size=2}}
       {{fa-icon "circle-thin" stack=2}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/components/column-item.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/column-item.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/column-item.hbs
index b649d5b..21418c1 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/column-item.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/column-item.hbs
@@ -75,7 +75,7 @@
       container-class="modal-dialog"}}
       <div class="modal-content">
         <div class="modal-header text-danger">
-          <button type="button" class="close" {{action "advanceOptionToggle"}}><span aria-hidden="true">&times;</span></button>
+          <button type="button" class="close" {{action "advanceOptionToggle"}}>&times;</button>
           <h4 class="modal-title">{{fa-icon "cog" size="lg"}} Advance Column Options</h4>
         </div>
         <div class="modal-body">

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/components/confirm-dialog.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/confirm-dialog.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/confirm-dialog.hbs
index d940237..b62e98b 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/confirm-dialog.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/confirm-dialog.hbs
@@ -24,7 +24,7 @@
   <div class="modal-content">
     <div class="modal-header text-{{titleClass}}">
       {{#if closable}}
-        <button type="button" class="close" {{action "reject"}}><span aria-hidden="true">&times;</span></button>
+        <button type="button" class="close" {{action "reject"}}>&times;</button>
       {{/if}}
       <p class="modal-title">{{#if titleIcon}}{{fa-icon titleIcon size="lg"}}{{/if}} {{title}}</p>
     </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/components/export-result.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/export-result.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/export-result.hbs
index 0b462cb..69991f1 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/export-result.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/export-result.hbs
@@ -23,7 +23,7 @@
   container-class="modal-dialog"}}
   <div class="modal-content">
     <div class="modal-header text-{{titleClass}}">
-        <button type="button" class="close" {{action "reject"}}><span aria-hidden="true">&times;</span></button>
+        <button type="button" class="close" {{action "reject"}}>&times;</button>
     </div>
     <div class="modal-body">
       <p class="lead">{{#if labelIcon}}{{fa-icon labelIcon size="lg"}}{{/if}} {{label}}</p>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/components/hdfs-viewer-modal.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/hdfs-viewer-modal.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/hdfs-viewer-modal.hbs
index f3dce67..aee6fc3 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/hdfs-viewer-modal.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/hdfs-viewer-modal.hbs
@@ -24,7 +24,7 @@
   <div class="modal-content hdfs-viewer">
     <div class="modal-header">
       <div class="text-info">
-        <button type="button" class="close" {{action "closeDirectoryViewer"}}><span aria-hidden="true">&times;</span></button>
+        <button type="button" class="close" {{action "closeDirectoryViewer"}}>&times;</button>
         <h4 class="modal-title">{{fa-icon "database" size="lg"}} Select location</h4>
       </div>
       {{#if (and showSelectedPath hdfsLocation)}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/components/info-dialog.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/info-dialog.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/info-dialog.hbs
index cf44f7e..59f7309 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/info-dialog.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/info-dialog.hbs
@@ -23,7 +23,7 @@
   <div class="modal-content">
     <div class="modal-header text-{{titleClass}}">
       {{#if closable}}
-        <button type="button" class="close" {{action "ok"}}><span aria-hidden="true">&times;</span></button>
+        <button type="button" class="close" {{action "ok"}}>&times;</button>
       {{/if}}
       <p class="modal-title">{{#if titleIcon}}{{fa-icon titleIcon size="lg"}}{{/if}} {{title}}</p>
     </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
index 9700038..4d8524e 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
@@ -29,8 +29,8 @@
 {{#unless isQueryRunning}}
   <div class="clearfix" style="text-align: right; padding-right:5px">
     <span class="dropdown">
-      <button class="btn btn-default dropdown-toggle" title="Actions" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">{{fa-icon "bars"}} </button>
-      <ul class="dropdown-menu dropdown-menu-right" aria-labelledby="dropdownMenu">
+      <button class="btn btn-default dropdown-toggle" title="Actions" data-toggle="dropdown">{{fa-icon "bars"}} </button>
+      <ul class="dropdown-menu dropdown-menu-right">
         <li><a href="#" {{action "openSaveHdfsModal" }} class="text-uppercase">{{fa-icon "save"}} Save To HDFS</a></li>
         <li><a href="#" {{action "openDownloadCsvModal" }} class="text-uppercase">{{fa-icon "download"}} Download As CSV</a></li>
       </ul>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/databases.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases.hbs
index bb4d680..ecedef5 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/databases.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases.hbs
@@ -19,7 +19,7 @@
 {{database-search-bar databases=model selected="databaseSelected"}}
 <div class="hv-dropdown database-dropdown">
   <div class="dropdown">
-    <button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">
+    <button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown">
       {{fa-icon "navicon"}}
     </button>
     <ul class="dropdown-menu dropdown-menu-right">

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table.hbs
index 9a1306a..2fe3cfb 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table.hbs
@@ -21,10 +21,10 @@
     <p class="text-uppercase">table<strong>&nbsp;&nbsp;>&nbsp;&nbsp;{{model.table}}</strong></p>
     <div class="hv-dropdown tables-dropdown">
       <div class="dropdown">
-        <button class="btn btn-default dropdown-toggle" type="button" id="dropdownMenu1" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">
+        <button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown">
           {{fa-icon "navicon"}}
         </button>
-        <ul class="dropdown-menu dropdown-menu-right" aria-labelledby="dropdownMenu1">
+        <ul class="dropdown-menu dropdown-menu-right">
           <li>{{#link-to "databases.database.tables.table.edit" class="text-uppercase"}}{{fa-icon "edit"}} Edit{{/link-to}}</li>
           <li>{{#link-to "databases.database.tables.table.rename" class="text-uppercase"}}{{fa-icon "edit"}} Rename{{/link-to}}</li>
           <li><a href="#" class="text-uppercase" {{action "deleteTable" model}}>{{fa-icon "trash"}} Delete</a></li>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index 8ee8e87..2d6c5aa 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -112,13 +112,13 @@
 <div class="col-md-3 database-panel">
   <div class="database-container">
     <div class="row">
-    <div class="panel-group database-panel" id="db_accordion" role="tablist" aria-multiselectable="true">
+    <div class="panel-group database-panel" id="db_accordion" role="tablist">
       {{#each selectedTablesModels as |tableModel|}}
         <div class="panel panel-default">
           <div class="panel-heading" role="tab" id={{concat 'db_heading_' tableModel.dbname}}>
             <h4 class="panel-title">
               <a role="button" data-i-toggle="collapse" data-i-parent="#db_accordion"
-                 href="javascript:void(0)" {{action 'showTables' tableModel.dbname }} aria-expanded="true"
+                 href="javascript:void(0)" {{action 'showTables' tableModel.dbname }}
                  aria-controls={{concat 'db_body_' tableModel.dbname}}>
                 {{ tableModel.dbname }} {{#if (eq tableModel.dbname worksheet.selectedDb)}} {{fa-icon "check"}}  {{/if}}
               </a>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
index 287fc34..36dc982 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
@@ -37,8 +37,8 @@
           <td>{{savedQuery.owner}}</td>
           <td>
               <div class="dropdown">
-                <a class="dropdown-toggle" id="dropdownMenu1121" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">{{fa-icon "cog"}}</a>
-                <ul class="dropdown-menu dropdown-menu-right" aria-labelledby="dropdownMenu">
+                <a class="dropdown-toggle" data-toggle="dropdown">{{fa-icon "cog"}}</a>
+                <ul class="dropdown-menu dropdown-menu-right"">
                   <li><a href="#" {{action "historySavedQuery" savedQuery.id }} class="text-uppercase">{{fa-icon "history"}} History</a></li>
                   <li><a href="#" {{action "openDeleteSavedQueryModal" savedQuery.id}} class="text-uppercase">{{fa-icon "remove"}} Delete</a></li>
                   <li><a href="#" {{action "openAsWorksheet" savedQuery }} class="text-uppercase">{{fa-icon "folder-open-o"}} Open as worksheet</a></li>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09e35e1e/contrib/views/hive20/src/main/resources/ui/app/templates/service-check.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/service-check.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/service-check.hbs
index 8233c93..2a867aa 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/service-check.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/service-check.hbs
@@ -18,7 +18,7 @@
 
 <div class="col-md-offset-2 col-md-8">
   <div class="progress">
-    <div class="progress-bar {{if hasError 'progress-bar-danger'}} {{if serviceCheck.checkCompleted 'progress-bar-success'}}" role="progressbar" aria-valuenow="60" aria-valuemin="0" aria-valuemax="100" style={{progressStyle}}>
+    <div class="progress-bar {{if hasError 'progress-bar-danger'}} {{if serviceCheck.checkCompleted 'progress-bar-success'}}" role="progressbar" style={{progressStyle}}>
     </div>
   </div>
 


[14/50] [abbrv] ambari git commit: AMBARI-19955 : added the is first row header functionality to upload table (nitirajrathore)

Posted by nc...@apache.org.
AMBARI-19955 : added the is first row header functionality to upload table (nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d7e11e9a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d7e11e9a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d7e11e9a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d7e11e9abfde98618ff90f96b8a0952c30c6bef1
Parents: 97994e2
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Fri Feb 10 16:15:59 2017 +0530
Committer: Nitiraj Singh Rathore <ni...@gmail.com>
Committed: Fri Feb 10 16:17:49 2017 +0530

----------------------------------------------------------------------
 .../ui/app/components/csv-format-params.js      |   1 -
 .../src/main/resources/ui/app/models/column.js  |   8 +-
 .../databases/database/tables/upload-table.js   | 199 +++----------------
 .../ui/app/services/table-operations.js         |   4 +-
 .../templates/components/csv-format-params.hbs  |  12 ++
 5 files changed, 49 insertions(+), 175 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d7e11e9a/contrib/views/hive20/src/main/resources/ui/app/components/csv-format-params.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/csv-format-params.js b/contrib/views/hive20/src/main/resources/ui/app/components/csv-format-params.js
index 7a14ba8..5a8f00d 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/csv-format-params.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/csv-format-params.js
@@ -25,7 +25,6 @@ export default Ember.Component.extend({
   DEFAULT_CSV_QUOTE: '"',
   DEFAULT_CSV_ESCAPE: '\\',
   DEFAULT_FILE_TYPE: 'CSV',
-  isFirstRowHeader: false, // is first row  header
   csvParams: Ember.Object.create(),
   inputFileTypes: Ember.computed(function () {
     return Helpers.getUploadFileTypes();

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7e11e9a/contrib/views/hive20/src/main/resources/ui/app/models/column.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/column.js b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
index f38309b..73a9824 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/column.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
@@ -90,9 +90,11 @@ let Column = Ember.Object.extend(Ember.Copyable,{
       } else if(this.get('type.hasScale') && this.get('scale') && (this.get('precision') < this.get('scale'))) {
         this.get('errors').pushObject({type: 'precision', error: "Precision can only be greater than scale"});
       }
-
+    }else{
+      delete this.precision;
     }
 
+
     if(this.get('type.hasScale')) {
       if(Ember.isEmpty(this.get('scale'))) {
         this.get('errors').pushObject({type: 'scale', error: "Scale cannot be empty"});
@@ -101,8 +103,10 @@ let Column = Ember.Object.extend(Ember.Copyable,{
       } else if(this.get('scale') <= 0) {
         this.get('errors').pushObject({type: 'scale', error: "Scale can only be greater than zero"});
       }
-
+    }else{
+      delete this.scale;
     }
+
     return this.get('errors.length') === 0;
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7e11e9a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
index ba3260c..0e61905 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
@@ -21,44 +21,31 @@ import NewTable from './new';
 import constants from '../../../../utils/constants';
 import Column from '../../../../models/column';
 import datatypes from '../../../../configs/datatypes';
+import Helpers from '../../../../configs/helpers';
 
 export default NewTable.extend({
   COLUMN_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
   TABLE_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
   HDFS_PATH_REGEX: "^[/]{1}.+",  // unix path allows everything but here we have to mention full path so starts with /
-  init: function () {
-    this._super();
-  },
-
-  COLUMN_NAME_PREFIX : "column",
-  i18n : Ember.inject.service('i18n'),
+  i18n : Ember.inject.service("i18n"),
   jobService: Ember.inject.service(constants.services.jobs),
   notifyService: Ember.inject.service(constants.services.alertMessages),
   showErrors: false,
-  baseUrl: "/resources/upload",
-  header: null,  // header received from server
-  files: null, // files that need to be uploaded only file[0] is relevant
-  firstRow: [], // the actual first row of the table.
-  rows: null,  // preview rows received from server
-  databaseName: null,
-  selectedDatabase: null,
-  filePath: null,
-  tableName: null,
-  uploadProgressInfos : [],
-  DEFAULT_DB_NAME : 'default',
-  showPreview : false,
-  containsEndlines: false,
-  storedAsTextFile : Ember.computed.equal("selectedFileType","TEXTFILE"),
-  storedAsNotTextFile : Ember.computed.not("storedAsTextFile"),
+  init: function () {
+    this._super();
+  },
   setupController(controller, model) {
     this._super(controller, model);
     this.controller.set("showUploadTableModal", false);
   },
-  onChangeSelectedFileType: function(){
-    if(this.get('selectedFileType') === this.get('fileTypes')[1] && this.get('containsEndlines') === true){
-      this.set('containsEndlines', false);
-    }
-  }.observes("selectedFileType", "containsEndlines"),
+  getCharOptionByCharCode: function(charCode){
+    return Helpers.getAllTerminationCharacters().findBy("id", charCode + "");
+  },
+  // onChangeSelectedFileType: function(){
+  //   if(this.get('selectedFileType') === this.get('fileTypes')[1] && this.get('containsEndlines') === true){
+  //     this.set('containsEndlines', false);
+  //   }
+  // }.observes("selectedFileType", "containsEndlines"),
   getUploader(){
     return this.get('store').adapterFor('upload-table');
   },
@@ -78,37 +65,12 @@ export default NewTable.extend({
       Ember.set(item, 'name',  valueArray[index]);
     }, this);
   },
-  isFirstRowHeaderDidChange: function () {
-    if (this.get('isFirstRowHeader') != null && typeof this.get('isFirstRowHeader') !== 'undefined') {
-      if (this.get('isFirstRowHeader') == false) {
-        if (this.get('rows')) {
-          this.get('rows').unshiftObject({row: this.get('firstRow')});
-          this._setHeaderElements(this.get('header'),this.get('defaultColumnNames'));
-        }
-      } else if (this.get('header')) { // headers are available
-        // take first row of
-        this._setHeaderElements(this.get('header'),this.get('firstRow'));
-        this.get('rows').removeAt(0);
-      }
-
-      this.printValues();
-    }
-  }.observes('isFirstRowHeader'),
-
-  popUploadProgressInfos: function () {
-    // var msg = this.get('uploadProgressInfos').popObject();
-  },
 
   pushUploadProgressInfos : function(info){
     this.controller.set("uploadTableMessage", info);
     this.showUploadModal();
-    // this.get('uploadProgressInfos').pushObject(info);
   },
   clearUploadProgressModal : function(){
-    var len = this.get('uploadProgressInfos').length;
-    for( var i = 0 ; i < len ; i++){
-      this.popUploadProgressInfos();
-    }
   },
 
   hideUploadModal : function(){
@@ -121,28 +83,8 @@ export default NewTable.extend({
   },
 
   clearFields: function () {
-    this.set("showPreview",false);
-    this.set("hdfsPath");
-    this.set("header");
-    this.set("rows");
-    this.set("escapedBy");
-    this.set("fieldsTerminatedBy");
     this.set("error");
-    this.set('files');
-    this.set("firstRow");
-    this.set("selectedDatabase",null);
-    this.set("databaseName");
-    this.set("filePath");
-    this.set('tableName');
     this.clearUploadProgressModal();
-    this.printValues();
-  },
-
-  printValues: function () {
-    console.log("header : ", this.get('header'),
-      ". rows : ",this.get('rows'),". error : ", this.get('error'),
-      " isFirstRowHeader : ", this.get('isFirstRowHeader'),
-      "firstRow : ", this.get('firstRow'));
   },
 
   generateTempTableName: function () {
@@ -187,39 +129,14 @@ export default NewTable.extend({
     let csvParams = sourceObject.get("fileFormatInfo.csvParams");
 
     return this.getUploader().uploadFiles('preview', files, {
-      "isFirstRowHeader": sourceObject.get("isFirstRowHeader"),
       "inputFileType": sourceObject.get("fileFormatInfo.inputFileType").id,
+      "isFirstRowHeader": csvParams.get("isFirstRowHeader"),
       "csvDelimiter": csvParams.get("csvDelimiter").name,
       "csvEscape": csvParams.get("csvEscape").name,
       "csvQuote": csvParams.get("csvQuote").name
     });
   },
 
-  getAsciiChar : function(key){
-    if(!key){
-      return null;
-    }
-
-    var value = this.get(key);
-    if(value && value.id != -1) {
-      return String.fromCharCode(value.id);
-    }else{
-      return null;
-    }
-  },
-  getCSVParams : function(){
-    var csvd = this.getAsciiChar('csvDelimiter');
-    if(!csvd && csvd != 0) csvd = this.get('DEFAULT_CSV_DELIMITER');
-
-    var csvq = this.getAsciiChar('csvQuote');
-    if(!csvq && csvq != 0) csvq = this.get('DEFAULT_CSV_QUOTE');
-
-    var csve = this.getAsciiChar('csvEscape');
-    if(!csve && csve != 0) csve = this.get('DEFAULT_CSV_ESCAPE');
-
-    return {"csvDelimiter": csvd, "csvQuote" : csvq, "csvEscape": csve};
-  },
-
   uploadForPreviewFromHDFS: function (sourceObject) {
     console.log("uploadForPreviewFromHDFS called.");
     // this.validateHDFSPath(hdfsPath);
@@ -228,9 +145,9 @@ export default NewTable.extend({
     var csvParams = sourceObject.get("fileFormatInfo.csvParams");
 
     return this.getUploader().previewFromHDFS({
-      "isFirstRowHeader": sourceObject.get("fileFormatInfo.isFirstRowHeader"),
       "inputFileType": sourceObject.get("fileFormatInfo.inputFileType").id,
       "hdfsPath": hdfsPath,
+      "isFirstRowHeader": csvParams.get("isFirstRowHeader"),
       "csvDelimiter": csvParams.get("csvDelimiter").name,
       "csvEscape": csvParams.get("csvEscape").name,
       "csvQuote": csvParams.get("csvQuote").name
@@ -254,6 +171,7 @@ export default NewTable.extend({
         self.onGeneratePreviewFailure(error);
       }).catch(function (error) {
         console.log("inside catch : ", error);
+        throw error;
       }).finally(function () {
         console.log("finally hide the modal always after preview.");
         self.hideUploadModal();
@@ -276,25 +194,8 @@ export default NewTable.extend({
 
   previewTable: function (data) {
     console.log('inside previewTable. data : ', data);
-    var self = this;
-    var defaultColumnNames = data.header.map(function(item,index){
-      return { "name": self.COLUMN_NAME_PREFIX + (index + 1) }
-    });
-    this.set("defaultColumnNames",defaultColumnNames);
     this.set("previewData", data);
-    this.set("header", this.get("previewData.header"));
-    this.set('isFirstRowHeader', this.get("previewData.isFirstRowHeader"));
-    this.set('tableName', this.get("previewData.tableName"));
-    if (data.isFirstRowHeader == true) {
-      this.set("firstRow", this.get("previewData.header"));
-    }else {
-      if(data.rows.length > 0){
-        this.set("firstRow", this.get("previewData.rows")[0].row);
-      }else{
-        this.set("firstRow", Ember.A());
-      }
-    }
-    this.set("rows", this.get("previewData.rows"));
+    this.controller.set('tableName', this.get("previewData.tableName"));
     this.controller.set('tableName', this.get("previewData.tableName"));
     this.controller.set("rows", this.get("previewData.rows"));
     this.controller.set("columns", this.transformToColumnModelList(this.get("previewData.header")));
@@ -336,31 +237,6 @@ export default NewTable.extend({
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateActualTable'));
     var retValue = this.createTable(tableData.get("tableMeta"));
     return retValue;
-    // var self = this;
-    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateActualTable'));
-    // var headers = this.get('header');
-    // var selectedDatabase = this.get('selectedDatabase');
-    // if (!selectedDatabase) {
-    //   throw new Error(this.translate('hive.errors.emptyDatabase', {database : this.translate("hive.words.database")}));
-    // }
-    //
-    // this.set('databaseName', this.get('selectedDatabase.id'));
-    // var databaseName = this.get('databaseName');
-    // var tableName = this.get("tableMeta").name;
-    // var isFirstRowHeader = this.get('isFirstRowHeader');
-    // var filetype = this.get("selectedFileType");
-    //
-    // this.validateInput(headers,tableName,databaseName,isFirstRowHeader);
-    // this.showUploadModal();
-    // var rowFormat = this.getRowFormat();
-    // return this.getUploader().createTable({
-    //   "isFirstRowHeader": isFirstRowHeader,
-    //   "header": headers,
-    //   "tableName": tableName,
-    //   "databaseName": databaseName,
-    //   "hiveFileType":filetype,
-    //   "rowFormat": { "fieldsTerminatedBy" : rowFormat.fieldsTerminatedBy, "escapedBy" : rowFormat.escapedBy}
-    // });
   },
   getRowFormat : function(){
     var fieldsTerminatedBy = this.getAsciiChar('fieldsTerminatedBy');
@@ -369,7 +245,6 @@ export default NewTable.extend({
   },
   waitForCreateActualTable: function (jobId) {
     console.log("waitForCreateActualTable");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateActualTable'));
     var self = this;
     var p = new Ember.RSVP.Promise(function (resolve, reject) {
@@ -380,12 +255,10 @@ export default NewTable.extend({
   },
   onCreateActualTableSuccess: function () {
     console.log("onCreateTableSuccess");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedActualTable'));
   },
   onCreateActualTableFailure: function (error) {
     console.log("onCreateActualTableFailure");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateActualTable'));
     this.setError(error);
   },
@@ -420,20 +293,19 @@ export default NewTable.extend({
     tableMeta.settings.fileFormat = {};
     tableMeta.settings.fileFormat.type = "TEXTFILE";
     this.set("tableData.tempTableMeta", tableMeta);
+    if(!tableMeta.settings){
+      tableMeta.settings = {};
+    }
+    if(!tableMeta.settings.rowFormat){
+      tableMeta.settings.rowFormat = {};
+    }
+    tableMeta.settings.rowFormat.fieldTerminatedBy = this.getCharOptionByCharCode(1);
+    tableMeta.settings.rowFormat.escapeDefinedAs = this.getCharOptionByCharCode(0);
     return this.createTable(tableMeta);
-    // return this.getUploader().createTable({
-    //   "isFirstRowHeader": this.get("isFirstRowHeader"),
-    //   "header": headers,
-    //   "tableName": tempTableName,
-    //   "databaseName": this.get('databaseName'),
-    //   "hiveFileType":"TEXTFILE",
-    //   "rowFormat": { "fieldsTerminatedBy" : parseInt('1', 10), "escapedBy" : null}
-    // });
   },
 
   waitForCreateTempTable: function (jobId) {
     console.log("waitForCreateTempTable");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateTemporaryTable'));
     var self = this;
     var p = new Ember.RSVP.Promise(function (resolve, reject) {
@@ -445,7 +317,6 @@ export default NewTable.extend({
 
   onCreateTempTableSuccess: function () {
     console.log("onCreateTempTableSuccess");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedTemporaryTable'));
   },
 
@@ -467,11 +338,9 @@ export default NewTable.extend({
         self.waitForJobStatus(job.id, resolve, reject);
       });
     }).then(function () {
-      self.popUploadProgressInfos();
       self.pushUploadProgressInfos(this.formatMessage('hive.messages.succesfullyDeletedTable',{table:tableLabel}));
       return Ember.RSVP.Promise.resolve();
     }, function (err) {
-      self.popUploadProgressInfos();
       self.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToDeleteTable',{table:tableLabel}));
       self.setError(err);
       return Ember.RSVP.Promise.reject();
@@ -479,7 +348,7 @@ export default NewTable.extend({
   },
 
   rollBackActualTableCreation : function(){
-    return this.deleteTableOnError(this.get("database"),this.get("tableMeta").name,this.translate('hive.words.actual'));
+    return this.deleteTableOnError(this.get("tableData.database"),this.get("tableData.tableMeta").name,this.translate('hive.words.actual'));
   },
 
   translate : function(str,vars){
@@ -491,7 +360,6 @@ export default NewTable.extend({
   onCreateTempTableFailure : function(error){
     console.log("onCreateTempTableFailure");
     this.setError(error);
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateTemporaryTable'));
     return this.rollBackActualTableCreation().then(function(data){
       return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
@@ -513,7 +381,6 @@ export default NewTable.extend({
 
   waitForUploadingFile: function (data) {
     console.log("waitForUploadingFile");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToUploadFile'));
     if( data.jobId ){
       var self = this;
@@ -528,13 +395,12 @@ export default NewTable.extend({
 
   onUploadingFileSuccess: function () {
     console.log("onUploadingFileSuccess");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyUploadedFile') );
   },
 
   rollBackTempTableCreation: function () {
     var self = this;
-    return this.deleteTableOnError(this.get("database"),this.get("tempTableMeta").name,this.translate('hive.words.temporary')).then(function(data){
+    return this.deleteTableOnError(this.get("tableData.database"),this.get("tableData.tempTableMeta").name,this.translate('hive.words.temporary')).then(function(data){
       return self.rollBackActualTableCreation();
     },function(err){
       return self.rollBackActualTableCreation();
@@ -544,7 +410,6 @@ export default NewTable.extend({
   onUploadingFileFailure: function (error) {
     console.log("onUploadingFileFailure");
     this.setError(error);
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToUploadFile'));
     return this.rollBackTempTableCreation().then(function(data){
       return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
@@ -579,7 +444,6 @@ export default NewTable.extend({
 
   waitForInsertIntoTable: function (jobId) {
     console.log("waitForInsertIntoTable");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToInsertRows'));
     var self = this;
     var p = new Ember.RSVP.Promise(function (resolve, reject) {
@@ -591,14 +455,12 @@ export default NewTable.extend({
 
   onInsertIntoTableSuccess: function () {
     console.log("onInsertIntoTableSuccess");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyInsertedRows'));
   },
 
   onInsertIntoTableFailure: function (error) {
     console.log("onInsertIntoTableFailure");
     this.setError(error);
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToInsertRows'));
     return this.rollBackUploadFile().then(function(data){
       return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
@@ -617,7 +479,6 @@ export default NewTable.extend({
   },
   waitForDeleteTempTable: function (jobId) {
     console.log("waitForDeleteTempTable");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToDeleteTemporaryTable'));
     var self = this;
     var p = new Ember.RSVP.Promise(function (resolve, reject) {
@@ -628,7 +489,6 @@ export default NewTable.extend({
   },
   onDeleteTempTableSuccess: function () {
     console.log("onDeleteTempTableSuccess");
-    this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyDeletedTemporaryTable'));
     this.onUploadSuccessfull();
   },
@@ -801,32 +661,31 @@ export default NewTable.extend({
     let header = columns; //JSON.stringify(columns);
 
     return this.getUploader().uploadFromHDFS({
-      "isFirstRowHeader": tableData.get("fileFormatInfo.isFirstRowHeader"),
       "databaseName": tableData.get("database"),
       "tableName": tableData.get("tempTableMeta").name,
       "inputFileType": tableData.get("fileFormatInfo.inputFileType").id,
       "hdfsPath": tableData.get("fileInfo.hdfsPath"),
       "header": header,
       "containsEndlines": tableData.get("fileFormatInfo.containsEndlines"),
+      "isFirstRowHeader": csvParams.get("isFirstRowHeader"),
       "csvDelimiter": csvParams.get("csvDelimiter").name,
       "csvEscape": csvParams.get("csvEscape").name,
       "csvQuote": csvParams.get("csvQuote").name
     });
   },
   uploadTable: function (tableData) {
-    this.printValues();
     var csvParams = tableData.get("fileFormatInfo.csvParams");
     let columns = tableData.get("tableMeta").columns.map(function(column){
       return {"name": column.get("name"), "type": column.get("type.label")};
     });
     let header = JSON.stringify(columns);
     return this.getUploader().uploadFiles('upload', tableData.get("fileInfo.files"), {
-      "isFirstRowHeader": tableData.get("fileFormatInfo.isFirstRowHeader"),
       "databaseName" :  tableData.get("database"),
       "tableName" : tableData.get("tempTableMeta").name,
       "inputFileType" : tableData.get("fileFormatInfo.inputFileType").id,
       "header": header,
       "containsEndlines": tableData.get("fileFormatInfo.containsEndlines"),
+      "isFirstRowHeader": csvParams.get("isFirstRowHeader"),
       "csvDelimiter": csvParams.get("csvDelimiter").name,
       "csvEscape": csvParams.get("csvEscape").name,
       "csvQuote": csvParams.get("csvQuote").name

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7e11e9a/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js b/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
index a5be574..2071317 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
@@ -178,12 +178,12 @@ export default Ember.Service.extend({
       }
 
       if (!Ember.isEmpty(storageSettings.rowFormat.nullDefinedAs)) {
-        parameters['serialization.null.format'] = String.fromCharCode(storageSettings.rowFormat.fieldTerminatedBy.id);
+        parameters['serialization.null.format'] = String.fromCharCode(storageSettings.rowFormat.nullDefinedAs.id);
         addParameters = true;
       }
 
       if (!Ember.isEmpty(storageSettings.rowFormat.escapeDefinedAs)) {
-        parameters['escape.delim'] = String.fromCharCode(storageSettings.rowFormat.linesTerminatedBy.id);
+        parameters['escape.delim'] = String.fromCharCode(storageSettings.rowFormat.escapeDefinedAs.id);
         addParameters = true;
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7e11e9a/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
index a7cb862..c63f502 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
@@ -110,6 +110,18 @@
         </div>
       </div>
     </div>
+    <div class="row">
+      <div class="col-md-6 form-horizontal">
+        <div class="form-group">
+          <label class="col-md-2 control-label">Is First Row Header</label>
+          <div class="col-md-4">
+            <label>
+              {{input type="checkbox" checked=fileFormatInfo.csvParams.isFirstRowHeader}}
+            </label>
+          </div>
+        </div>
+      </div>
+    </div>
     {{/if}}
   </div>
   {{/if}}


[21/50] [abbrv] ambari git commit: AMBARI-19960 No config properties are changed after deleting service if it's performed after service page refresh. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-19960 No config properties are changed after deleting service if it's performed after service page refresh. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eecb5a31
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eecb5a31
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eecb5a31

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: eecb5a311626f2bab09688d37949f0ccedde86a8
Parents: c9bea4a
Author: ababiichuk <ab...@hortonworks.com>
Authored: Fri Feb 10 16:02:12 2017 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Fri Feb 10 18:22:30 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/controllers/main/service/item.js | 67 +++++++++++---------
 .../test/controllers/main/service/item_test.js  | 46 +++++++++++++-
 2 files changed, 81 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/eecb5a31/ambari-web/app/controllers/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/item.js b/ambari-web/app/controllers/main/service/item.js
index 7010ab5..a7ed493 100644
--- a/ambari-web/app/controllers/main/service/item.js
+++ b/ambari-web/app/controllers/main/service/item.js
@@ -144,8 +144,9 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
    * @type {String[]}
    */
   dependentServiceNames: function() {
-    return App.StackService.find(this.get('content.serviceName')).get('dependentServiceNames');
-  }.property('content.serviceName'),
+    return App.get('router.clusterController.isConfigsPropertiesLoaded') ?
+      App.StackService.find(this.get('content.serviceName')).get('dependentServiceNames') : [];
+  }.property('content.serviceName', 'App.router.clusterController.isConfigsPropertiesLoaded'),
 
   /**
    * List of service names that could be deleted
@@ -204,32 +205,35 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
    */
   onLoadConfigsTags: function (data) {
     var self = this;
-    var sitesToLoad = this.get('sitesToLoad'), allConfigs = [];
-    var loadedSites = data.Clusters.desired_configs;
-    var siteTagsToLoad = [];
-    for (var site in loadedSites) {
-      if (sitesToLoad.contains(site)) {
-        siteTagsToLoad.push({
-          siteName: site,
-          tagName: loadedSites[site].tag
-        });
+    App.get('router.mainController.isLoading').call(App.get('router.clusterController'), 'isConfigsPropertiesLoaded').done(function () {
+      var sitesToLoad = self.get('sitesToLoad'),
+        allConfigs = [],
+        loadedSites = data.Clusters.desired_configs,
+        siteTagsToLoad = [];
+      for (var site in loadedSites) {
+        if (sitesToLoad.contains(site)) {
+          siteTagsToLoad.push({
+            siteName: site,
+            tagName: loadedSites[site].tag
+          });
+        }
       }
-    }
-    App.router.get('configurationController').getConfigsByTags(siteTagsToLoad).done(function (configs) {
-      configs.forEach(function (site) {
-        self.get('configs')[site.type] = site.properties;
-        allConfigs = allConfigs.concat(App.config.getConfigsFromJSON(site, true));
-      });
+      App.router.get('configurationController').getConfigsByTags(siteTagsToLoad).done(function (configs) {
+        configs.forEach(function (site) {
+          self.get('configs')[site.type] = site.properties;
+          allConfigs = allConfigs.concat(App.config.getConfigsFromJSON(site, true));
+        });
 
-      self.get('dependentServiceNames').forEach(function(serviceName) {
-        var configTypes = App.StackService.find(serviceName).get('configTypeList');
-        var configsByService = allConfigs.filter(function (c) {
-          return configTypes.contains(App.config.getConfigTagFromFileName(c.get('filename')));
+        self.get('dependentServiceNames').forEach(function(serviceName) {
+          var configTypes = App.StackService.find(serviceName).get('configTypeList');
+          var configsByService = allConfigs.filter(function (c) {
+            return configTypes.contains(App.config.getConfigTagFromFileName(c.get('filename')));
+          });
+          self.get('stepConfigs').pushObject(App.config.createServiceConfig(serviceName, [], configsByService));
         });
-        self.get('stepConfigs').pushObject(App.config.createServiceConfig(serviceName, [], configsByService));
-      });
 
-      self.set('isServiceConfigsLoaded', true);
+        self.set('isServiceConfigsLoaded', true);
+      });
     });
   },
 
@@ -1290,7 +1294,13 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
       popupHeader = Em.I18n.t('services.service.delete.popup.header'),
       popupPrimary = Em.I18n.t('common.delete'),
       warningMessage = Em.I18n.t('services.service.delete.popup.warning').format(displayName) +
-        (interDependentServices.length ? Em.I18n.t('services.service.delete.popup.warning.dependent').format(dependentServicesToDeleteFmt) : '');
+        (interDependentServices.length ? Em.I18n.t('services.service.delete.popup.warning.dependent').format(dependentServicesToDeleteFmt) : ''),
+      callback = this.loadConfigRecommendations.bind(this, null, function () {
+        var serviceNames = self.get('changedProperties').mapProperty('serviceName').uniq();
+        self.loadConfigGroups(serviceNames).done(function () {
+          self.set('isRecommendationInProgress', false);
+        })
+      });
     this.clearRecommendations();
     this.setProperties({
       isRecommendationInProgress: true,
@@ -1298,12 +1308,7 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
         isDefault: true
       })
     });
-    this.loadConfigRecommendations(null, function () {
-      var serviceNames = self.get('changedProperties').mapProperty('serviceName').uniq();
-      self.loadConfigGroups(serviceNames).done(function () {
-        self.set('isRecommendationInProgress', false);
-      })
-    });
+    App.get('router.mainController.isLoading').call(this, 'isServiceConfigsLoaded').done(callback);
     return App.ModalPopup.show({
       controller: self,
       header: popupHeader,

http://git-wip-us.apache.org/repos/asf/ambari/blob/eecb5a31/ambari-web/test/controllers/main/service/item_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/item_test.js b/ambari-web/test/controllers/main/service/item_test.js
index 302a9a5..6430ab6 100644
--- a/ambari-web/test/controllers/main/service/item_test.js
+++ b/ambari-web/test/controllers/main/service/item_test.js
@@ -1719,7 +1719,7 @@ describe('App.MainServiceItemController', function () {
       mainServiceItemController.deleteServiceCall.restore();
     });
 
-    it("window.location.reload should be called", function() {
+    it("saveConfigs should be called", function() {
       mainServiceItemController.deleteServiceCallSuccessCallback([], null, {});
       expect(mainServiceItemController.deleteServiceCall.called).to.be.false;
       expect(mainServiceItemController.saveConfigs.calledOnce).to.be.true;
@@ -1878,4 +1878,48 @@ describe('App.MainServiceItemController', function () {
       expect(mainServiceItemController.isRangerPluginEnabled()).to.be.true;
     });
   });
+
+  describe('#dependentServiceNames', function () {
+
+    var controller,
+      serviceName = 's0',
+      dependentServiceNames = ['s1', 's2'],
+      testCases = [
+        {
+          isConfigsPropertiesLoaded: true,
+          dependentServiceNames: dependentServiceNames,
+          message: 'model is ready'
+        },
+        {
+          isConfigsPropertiesLoaded: false,
+          dependentServiceNames: [],
+          message: 'model is not ready'
+        }
+      ];
+
+    beforeEach(function () {
+      controller = App.MainServiceItemController.create({
+        content: {
+          serviceName: serviceName
+        }
+      });
+      sinon.stub(App.StackService, 'find').returns(Em.Object.create({
+        dependentServiceNames: dependentServiceNames
+      }));
+    });
+
+    afterEach(function () {
+      App.StackService.find.restore();
+    });
+
+    testCases.forEach(function (test) {
+
+      it(test.message, function () {
+        App.set('router.clusterController.isConfigsPropertiesLoaded', test.isConfigsPropertiesLoaded);
+        expect(controller.get('dependentServiceNames')).to.eql(test.dependentServiceNames);
+      });
+
+    });
+
+  });
 });


[46/50] [abbrv] ambari git commit: AMBARI-19981. Hive View 2.0: Enable Notifications. (dipayanb)

Posted by nc...@apache.org.
AMBARI-19981. Hive View 2.0: Enable Notifications. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6c4cbc4f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6c4cbc4f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6c4cbc4f

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 6c4cbc4f7efddd55e063dbff470fa04eee2e68bf
Parents: a6445ac
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Mon Feb 13 18:59:11 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Mon Feb 13 18:59:54 2017 +0530

----------------------------------------------------------------------
 .../resources/ui/app/components/create-table.js |  6 +++-
 .../resources/ui/app/configs/file-format.js     |  4 ++-
 .../resources/ui/app/controllers/messages.js    | 30 ++++++++++++++++
 .../ui/app/controllers/messages/message.js      | 31 ++++++++++++++++
 .../app/helpers/alert-message-context-class.js  | 27 ++++++++++++++
 .../ui/app/helpers/alert-message-icon-class.js  | 37 ++++++++++++++++++++
 .../resources/ui/app/helpers/shorten-text.js    | 32 +++++++++++++++++
 .../main/resources/ui/app/mixins/ui-logger.js   | 15 ++++++++
 .../main/resources/ui/app/routes/databases.js   | 12 ++++---
 .../databases/database/tables/new-database.js   | 15 ++++----
 .../app/routes/databases/database/tables/new.js |  8 +++--
 .../routes/databases/database/tables/table.js   |  8 +++--
 .../databases/database/tables/table/edit.js     | 16 ++++-----
 .../databases/database/tables/table/rename.js   | 15 ++++----
 .../src/main/resources/ui/app/routes/jobs.js    |  3 --
 .../main/resources/ui/app/routes/settings.js    | 10 +++---
 .../resources/ui/app/services/alert-messages.js | 13 ++++---
 .../ui/app/templates/databases-loading.hbs      | 21 +++++++++++
 .../databases/database/tables-loading.hbs       | 24 +++++++++++++
 .../databases/database/tables/table-loading.hbs | 21 +++++++++++
 .../resources/ui/app/templates/jobs-loading.hbs | 20 +++++++++++
 .../resources/ui/app/templates/messages.hbs     | 14 +++-----
 .../ui/app/templates/messages/message.hbs       |  6 ++--
 .../ui/app/templates/savedqueries-loading.hbs   | 21 +++++++++++
 .../ui/app/templates/settings-loading.hbs       | 21 +++++++++++
 .../resources/ui/app/templates/udfs-loading.hbs | 21 +++++++++++
 .../src/main/resources/ui/config/environment.js |  2 +-
 27 files changed, 387 insertions(+), 66 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/components/create-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/create-table.js b/contrib/views/hive20/src/main/resources/ui/app/components/create-table.js
index f31d37f..670ebd7 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/create-table.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/create-table.js
@@ -18,13 +18,17 @@
 
 import Ember from 'ember';
 import Helper from '../configs/helpers';
+import FileFormats from '../configs/file-format';
 
 export default Ember.Component.extend({
   init() {
     this._super(...arguments);
+    let defaultFileFormat = FileFormats.findBy('default', true);
     this.set('columns', Ember.A());
     this.set('properties', []);
-    this.set('settings', {});
+    this.set('settings', {
+      fileFormat: { type: defaultFileFormat.name}
+    });
     this.set('shouldAddBuckets', null);
     this.set('settingErrors', []);
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/configs/file-format.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/file-format.js b/contrib/views/hive20/src/main/resources/ui/app/configs/file-format.js
index 4042b63..afcba6e 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/configs/file-format.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/file-format.js
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-export default [
+let fileFormats = [
   {name: "SEQUENCEFILE", default: false, custom: false},
   {name: "TEXTFILE", default: false, custom: false},
   {name: "RCFILE", default: false, custom: false},
@@ -24,3 +24,5 @@ export default [
   {name: "AVRO", default: false, custom: false},
   {name: "CUSTOM SerDe", default: false, custom: true},
 ];
+
+export default fileFormats;

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/controllers/messages.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/controllers/messages.js b/contrib/views/hive20/src/main/resources/ui/app/controllers/messages.js
new file mode 100644
index 0000000..10aa612
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/controllers/messages.js
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Controller.extend({
+  isExpanded: true,
+  shortenLength: Ember.computed('isExpanded', function() {
+    if(this.get('isExpanded') === true) {
+      return 200;
+    } else {
+      return 100;
+    }
+  })
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/controllers/messages/message.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/controllers/messages/message.js b/contrib/views/hive20/src/main/resources/ui/app/controllers/messages/message.js
new file mode 100644
index 0000000..d46c1f6
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/controllers/messages/message.js
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Controller.extend({
+
+  showStatus: Ember.computed('model', function() {
+    return this.get('model.status') !== -1;
+  }),
+
+  displayBody: Ember.computed('model', function() {
+    return !(Ember.isBlank(this.get('model.responseMessage'))
+      && Ember.isBlank(this.get('model.trace')));
+  })
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/helpers/alert-message-context-class.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/helpers/alert-message-context-class.js b/contrib/views/hive20/src/main/resources/ui/app/helpers/alert-message-context-class.js
new file mode 100644
index 0000000..28a5a8d
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/helpers/alert-message-context-class.js
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export function alertMessageContextClass(params) {
+  let messageType = params[0];
+  let prefix = params[1];
+  return `${prefix}${messageType}`;
+}
+
+export default Ember.Helper.helper(alertMessageContextClass);

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/helpers/alert-message-icon-class.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/helpers/alert-message-icon-class.js b/contrib/views/hive20/src/main/resources/ui/app/helpers/alert-message-icon-class.js
new file mode 100644
index 0000000..707f2d1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/helpers/alert-message-icon-class.js
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export function alertMessageIconClass(params) {
+  let type = params[0];
+  switch (type) {
+    case 'success':
+      return 'check';
+    case 'info':
+      return 'info';
+    case 'warning':
+      return 'exclamation';
+    case 'danger':
+      return 'times';
+    default:
+      return 'check';
+  }
+}
+
+export default Ember.Helper.helper(alertMessageIconClass);

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/helpers/shorten-text.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/helpers/shorten-text.js b/contrib/views/hive20/src/main/resources/ui/app/helpers/shorten-text.js
new file mode 100644
index 0000000..c50b5ca
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/helpers/shorten-text.js
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export function shortenText(params) {
+  let text = params[0];
+  let length = params[1];
+  if (text.length <= length) {
+    return text;
+  } else {
+    return text.substring(0, length - 3) + '...';
+  }
+
+}
+
+export default Ember.Helper.helper(shortenText);

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/mixins/ui-logger.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/mixins/ui-logger.js b/contrib/views/hive20/src/main/resources/ui/app/mixins/ui-logger.js
new file mode 100644
index 0000000..fb252d2
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/mixins/ui-logger.js
@@ -0,0 +1,15 @@
+import Ember from 'ember';
+
+export default Ember.Mixin.create({
+  logger: Ember.inject.service('alert-messages'),
+
+  extractError(error) {
+    if (Ember.isArray(error.errors) && (error.errors.length > 0)) {
+      return error.errors[0];
+    } else if(!Ember.isEmpty(error.errors)) {
+      return error.errors;
+    } else {
+      return error;
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/routes/databases.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases.js
index 123a93f..6b0eab2 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases.js
@@ -17,8 +17,9 @@
  */
 
 import Ember from 'ember';
+import UILoggerMixin from '../mixins/ui-logger';
 
-export default Ember.Route.extend({
+export default Ember.Route.extend(UILoggerMixin, {
   tableOperations: Ember.inject.service(),
 
   model() {
@@ -93,7 +94,8 @@ export default Ember.Route.extend({
           this.controller.set('deleteDatabaseMessage', 'Waiting for the database to be deleted');
           this.get('tableOperations').waitForJobToComplete(job.get('id'), 5 * 1000)
             .then((status) => {
-              this.controller.set('deleteDatabaseMessage', "Successfully Deleted table");
+              this.controller.set('deleteDatabaseMessage', "Successfully deleted database");
+              this.get('logger').success(`Successfully deleted database '${databaseModel.get('name')}'`);
               Ember.run.later(() => {
                 this.store.unloadRecord(databaseModel);
                 this.controller.set('showDeleteDatabaseModal', false);
@@ -102,16 +104,16 @@ export default Ember.Route.extend({
                 this.refresh();
               }, 2 * 1000);
             }, (error) => {
-              // TODO: handle error
+              this.get('logger').danger(`Failed to delete database '${databaseModel.get('name')}'`, this.extractError(error));
               Ember.run.later(() => {
                 this.controller.set('showDeleteDatabaseModal', false);
                 this.controller.set('deleteDatabaseMessage');
                 this.replaceWith('databases');
                 this.refresh();
-              }, 2 * 1000);
+              }, 1 * 1000);
             });
         }, (error) => {
-          console.log("Error encountered", error);
+          this.get('logger').danger(`Failed to delete database '${databaseModel.get('name')}'`, this.extractError(error));
           this.controller.set('showDeleteDatabaseModal', false);
         });
     },

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new-database.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new-database.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new-database.js
index b421bdc..ad7fc99 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new-database.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new-database.js
@@ -17,8 +17,9 @@
  */
 
 import Ember from 'ember';
+import UILoggerMixin from '../../../../mixins/ui-logger';
 
-export default Ember.Route.extend({
+export default Ember.Route.extend(UILoggerMixin, {
 
   tableOperations: Ember.inject.service(),
 
@@ -40,10 +41,10 @@ export default Ember.Route.extend({
     }).then((status) => {
       this._modalStatus(true, 'Successfully created database');
       this._transitionToDatabases(newDatabaseName);
+      this.get('logger').success(`Successfully created database '${newDatabaseName}'`);
     }).catch((err) => {
-      this._modalStatus(true, 'Failed to create database');
-      this._alertMessage('Failed to create database', err);
-      this._transitionToDatabases();
+      this._modalStatus(false);
+      this.get('logger').danger(`Failed to create database '${newDatabaseName}'`, this.extractError(err));
     });
   },
 
@@ -59,10 +60,6 @@ export default Ember.Route.extend({
       this._modalStatus(false);
       this.transitionTo('databases');
     }, 2000);
-  },
-
-  _alertMessage(message, err) {
-    console.log(message, err);
-    // TODO: user alert message here
   }
+
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
index c8ad239..b29d863 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
@@ -18,8 +18,9 @@
 
 import Ember from 'ember';
 import tabs from '../../../../configs/create-table-tabs';
+import UILoggerMixin from '../../../../mixins/ui-logger';
 
-export default Ember.Route.extend({
+export default Ember.Route.extend(UILoggerMixin, {
   tableOperations: Ember.inject.service(),
 
   setupController(controller, model) {
@@ -44,6 +45,7 @@ export default Ember.Route.extend({
         return this.get('tableOperations').waitForJobToComplete(job.get('id'), 5 * 1000)
           .then((status) => {
             this.controller.set('createTableMessage', "Successfully created table");
+            this.get('logger').success(`Successfully created table '${settings.name}'`);
             Ember.run.later(() => {
             this.controller.set('showCreateTableModal', false);
             this.controller.set('createTableMessage');
@@ -55,7 +57,7 @@ export default Ember.Route.extend({
             }, 2 * 1000);
             return Ember.RSVP.Promise.resolve(job);
           }, (error) => {
-            // TODO: handle error
+            this.get('logger').danger(`Failed to create table '${settings.name}'`, this.extractError(error));
             Ember.run.later(() => {
               this.controller.set('showCreateTableModal', false);
               this.controller.set('createTableMessage');
@@ -67,7 +69,7 @@ export default Ember.Route.extend({
             return Ember.RSVP.Promise.reject(error);
           });
       }, (error) => {
-        console.log("Error encountered", error);
+        this.get('logger').danger(`Failed to create table '${settings.name}'`, this.extractError(error));
         this.controller.set('showCreateTableModal', true);
         throw error;
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
index 1066bc1..6ee8100 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
@@ -18,8 +18,9 @@
 
 import Ember from 'ember';
 import tabs from '../../../../configs/table-level-tabs';
+import UILoggerMixin from '../../../../mixins/ui-logger';
 
-export default Ember.Route.extend({
+export default Ember.Route.extend(UILoggerMixin, {
   tableOperations: Ember.inject.service(),
   model(params) {
     let database = this.modelFor('databases.database').get('name');
@@ -65,6 +66,7 @@ export default Ember.Route.extend({
         this.get('tableOperations').waitForJobToComplete(job.get('id'), 5 * 1000)
           .then((status) => {
             this.controller.set('deleteTableMessage', "Successfully Deleted table");
+            this.get('logger').success(`Successfully deleted table '${tableInfo.get('table')}'`);
             Ember.run.later(() => {
               this.controller.set('showDeleteTableModal', false);
               this.controller.set('deleteTableMessage');
@@ -73,7 +75,7 @@ export default Ember.Route.extend({
               this.transitionTo('databases.database', databaseModel.get('name'));
             }, 2 * 1000);
           }, (error) => {
-            // TODO: handle error
+            this.get('logger').danger(`Failed to delete table '${tableInfo.get('table')}'`, this.extractError(error));
             Ember.run.later(() => {
               this.controller.set('showDeleteTableModal', false);
               this.controller.set('deleteTableMessage');
@@ -81,7 +83,7 @@ export default Ember.Route.extend({
             }, 2 * 1000);
           });
       }, (error) => {
-        console.log("Error encountered", error);
+        this.get('logger').danger(`Failed to delete table '${tableInfo.get('table')}'`, this.extractError(error));
         this.controller.set('showDeleteTableModal', true);
       });
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js
index 47340ba..d9f80e1 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js
@@ -18,8 +18,9 @@
 
 import TableMetaRouter from './table-meta-router';
 import tabs from '../../../../../configs/edit-table-tabs';
+import UILoggerMixin from '../../../../../mixins/ui-logger';
 
-export default TableMetaRouter.extend({
+export default TableMetaRouter.extend(UILoggerMixin, {
 
   tableOperations: Ember.inject.service(),
 
@@ -51,12 +52,12 @@ export default TableMetaRouter.extend({
         this._modalStatus(true, 'Waiting for the table edit job to complete');
         return this.get('tableOperations').waitForJobToComplete(job.get('id'), 5 * 1000);
       }).then((status) => {
-        this._modalStatus(true, 'Successfully edited the table');
+        this._modalStatus(true, 'Successfully altered table');
+        this.get('logger').success(`Successfully altered table '${settings.table}'`);
         this._transitionToTables();
       }).catch((err) => {
-        this._modalStatus(true, 'Failed to edit table');
-        this._alertMessage('Failed to edit table', err);
-        this._transitionToTables();
+        this._modalStatus(false, 'Failed to edit table');
+        this.get('logger').danger(`Failed to alter table '${settings.table}'`, this.extractError(err));
       });
     }
 
@@ -75,11 +76,6 @@ export default TableMetaRouter.extend({
       this.send('refreshTableInfo');
       this.transitionTo('databases.database.tables.table');
     }, 2000);
-  },
-
-  _alertMessage(message, err) {
-    console.log(message, err);
-    // TODO: user alert message here
   }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/rename.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/rename.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/rename.js
index cac471e..30ebfc9 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/rename.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/rename.js
@@ -16,9 +16,11 @@
  * limitations under the License.
  */
 
+import Ember from 'ember';
 import TableMetaRouter from './table-meta-router';
+import UILoggerMixin from '../../../../../mixins/ui-logger';
 
-export default TableMetaRouter.extend({
+export default TableMetaRouter.extend(UILoggerMixin, {
 
   tableOperations: Ember.inject.service(),
 
@@ -52,11 +54,11 @@ export default TableMetaRouter.extend({
       return this.get('tableOperations').waitForJobToComplete(job.get('id'), 5 * 1000);
     }).then((status) => {
       this._modalStatus(true, 'Successfully renamed table');
+      this.get('logger').success(`Successfully renamed table '${oldTableName}' to '${newTableName}'`);
       this._transitionToTables();
     }).catch((err) => {
-      this._modalStatus(true, 'Failed to rename table');
-      this._alertMessage('Failed to rename table', err);
-      this._transitionToTables();
+      this._modalStatus(false, 'Failed to rename table');
+      this.get('logger').danger(`Failed to rename table '${oldTableName}' to '${newTableName}'`, this.extractError(err));
     });
   },
 
@@ -72,11 +74,6 @@ export default TableMetaRouter.extend({
       this._modalStatus(false);
       this.transitionTo('databases');
     }, 2000);
-  },
-
-  _alertMessage(message, err) {
-    console.log(message, err);
-    // TODO: user alert message here
   }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
index 419fd07..c27c74d 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
@@ -63,9 +63,6 @@ export default Ember.Route.extend({
       this.controller.set('startTime', this.get('moment').moment(startTime, 'YYYY-MM-DD').startOf('day').valueOf())
       this.controller.set('endTime', this.get('moment').moment(endTime, 'YYYY-MM-DD').endOf('day').valueOf())
       this.refresh();
-    },
-    hideDatePicker() {
-      console.log("Hiddennnnn");
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/routes/settings.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/settings.js b/contrib/views/hive20/src/main/resources/ui/app/routes/settings.js
index 1ce5116..affc126 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/settings.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/settings.js
@@ -17,9 +17,10 @@
  */
 
 import Ember from 'ember';
-import hiveParams from '../configs/hive-parameters'
+import hiveParams from '../configs/hive-parameters';
+import UILoggerMixin from '../mixins/ui-logger';
 
-export default Ember.Route.extend({
+export default Ember.Route.extend(UILoggerMixin, {
   model() {
     return this.store.findAll('setting').then(settings => settings.toArray());
   },
@@ -63,16 +64,15 @@ export default Ember.Route.extend({
         let model = this.get('controller.model');
         model.removeObject(data);
       }, err => {
-        console.log('error in deletion');
+        this.get('logger').danger(`Failed to delete setting with key: '${setting.get('key')}'`, this.extractError(err));
       })
     },
 
     updateAction(newSetting) {
       newSetting.save().then(data => {
-        console.log('saved', data);
         data.set('editMode', false);
       }, error => {
-        console.log('error', err);
+        this.get('logger').danger(`Failed to update setting with key: '${newSetting.get('key')}'`, this.extractError(error));
       })
     },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/services/alert-messages.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/alert-messages.js b/contrib/views/hive20/src/main/resources/ui/app/services/alert-messages.js
index ed4cff1..a05fc7a 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/alert-messages.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/alert-messages.js
@@ -69,6 +69,10 @@ export default Ember.Service.extend({
     this._processMessage('danger', message, options, alertOptions);
   },
 
+  error: function() {
+    this.danger(...arguments);
+  },
+
   clearMessages: function() {
     this.get('flashMessages').clearMessages();
   },
@@ -82,13 +86,13 @@ export default Ember.Service.extend({
     }
     switch (type) {
       case 'success':
-        this.get('flashMessages').success(message, this._getOptions(alertOptions));
+        this.get('flashMessages').success(message, this._getOptions(Ember.merge(alertOptions, {sticky: false})));
         break;
       case 'warn':
-        this.get('flashMessages').warning(message, this._getOptions(alertOptions));
+        this.get('flashMessages').warning(message, this._getOptions(Ember.merge(alertOptions, {sticky: false})));
         break;
       case 'info':
-        this.get('flashMessages').info(message, this._getOptions(alertOptions));
+        this.get('flashMessages').info(message, this._getOptions(Ember.merge(alertOptions, {sticky: false})));
         break;
       case 'danger':
         this.get('flashMessages').danger(message, this._getOptions(alertOptions));
@@ -126,7 +130,8 @@ export default Ember.Service.extend({
     var defaultOptions = {
       priority: 100,
       showProgress: true,
-      timeout: 6000
+      timeout: 6000,
+      sticky: true
     };
     return Ember.merge(defaultOptions, options);
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/templates/databases-loading.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases-loading.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases-loading.hbs
new file mode 100644
index 0000000..d0592ed
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases-loading.hbs
@@ -0,0 +1,21 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="alert alert-info text-center">
+  <p class="lead">Loading Databases. Please wait. {{fa-icon "refresh" spin=true}}</p>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables-loading.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables-loading.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables-loading.hbs
new file mode 100644
index 0000000..58c36e9
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables-loading.hbs
@@ -0,0 +1,24 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="alert alert-info text-center">
+  <p class="lead">Loading Tables. Please wait. {{fa-icon "refresh" spin=true}}</p>
+</div>
+
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table-loading.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table-loading.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table-loading.hbs
new file mode 100644
index 0000000..536d025
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table-loading.hbs
@@ -0,0 +1,21 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="alert alert-info text-center">
+  <p class="lead">Loading Table Information. Please wait. {{fa-icon "refresh" spin=true}}</p>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/templates/jobs-loading.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/jobs-loading.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/jobs-loading.hbs
new file mode 100644
index 0000000..f679709
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/jobs-loading.hbs
@@ -0,0 +1,20 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+<div class="alert alert-info text-center">
+  <p class="lead">Loading Jobs. Please wait. {{fa-icon "refresh" spin=true}}</p>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/templates/messages.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/messages.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/messages.hbs
index b856c20..52e8d98 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/messages.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/messages.hbs
@@ -17,12 +17,9 @@
 }}
 
 <div class="row">
-  <div class="col-md-12 messages-header">
-    <div class="col-md-1">
-      {{!--#link-to "files" (query-params path=currentBrowserPath) class="btn btn-primary"}}{{fa-icon "arrow-left"}} Browser{{/link-to--}}
-    </div>
-    <div class="col-md-2 col-md-offset-4 text-center">
-      <span class="messages-title">{{fa-icon "comment"}} Messages</span>
+  <div class="col-md-12 messages-header text-center">
+    <div class="alert alert-info">
+      <p class="lead">{{fa-icon "comment" size="lg" }} Notification Messages</p>
     </div>
   </div>
 </div>
@@ -30,7 +27,8 @@
   <div class={{if isExpanded "col-md-12" "col-md-4"}}>
     <div class="list-group">
       {{#each model as |message|}}
-        {{#link-to 'messages.message' message class=(alert-message-context-class message.type "list-group-item list-group-item-")}}
+        {{#link-to 'messages.message' message
+                   class=(alert-message-context-class message.type "list-group-item list-group-item-")}}
           <h4 class="list-group-item-heading wrap-message">
             {{#fa-stack}}
               {{fa-icon "circle-thin" stack=2}}
@@ -39,8 +37,6 @@
             {{{message.message}}}</h4>
           <p class="list-group-item-text wrap-message">{{shorten-text message.responseMessage shortenLength}}</p>
         {{/link-to}}
-      {{else}}
-        No messages present
       {{/each}}
     </div>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/templates/messages/message.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/messages/message.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/messages/message.hbs
index 0c69d58..5b82333 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/messages/message.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/messages/message.hbs
@@ -27,18 +27,18 @@
   {{#if displayBody}}
     <div class="panel-body">
       {{#if showStatus}}
-        <p><strong>Server status:</strong> {{model.status}}</p>
+        <p><strong>Status:</strong> {{model.status}}</p>
         <hr/>
       {{/if}}
       {{#if model.responseMessage}}
-        {{alert-message-display title="Server Message:"
+        {{alert-message-display title="Message:"
         value=model.responseMessage
         shorten=true
         length=200}}
         <hr/>
       {{/if}}
       {{#if model.trace}}
-        {{alert-message-display title="Error trace:"
+        {{alert-message-display title="Trace:"
         value=model.trace
         shorten=true
         length=500}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries-loading.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries-loading.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries-loading.hbs
new file mode 100644
index 0000000..571466b
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries-loading.hbs
@@ -0,0 +1,21 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="alert alert-info text-center">
+  <p class="lead">Loading Saved Queries. Please wait. {{fa-icon "refresh" spin=true}}</p>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/templates/settings-loading.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/settings-loading.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/settings-loading.hbs
new file mode 100644
index 0000000..253bbb1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/settings-loading.hbs
@@ -0,0 +1,21 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="alert alert-info text-center">
+  <p class="lead">Loading Settings. Please wait. {{fa-icon "refresh" spin=true}}</p>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/app/templates/udfs-loading.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/udfs-loading.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/udfs-loading.hbs
new file mode 100644
index 0000000..3b74db8
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/udfs-loading.hbs
@@ -0,0 +1,21 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="alert alert-info text-center">
+  <p class="lead">Loading UDFs. Please wait. {{fa-icon "refresh" spin=true}}</p>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c4cbc4f/contrib/views/hive20/src/main/resources/ui/config/environment.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/config/environment.js b/contrib/views/hive20/src/main/resources/ui/config/environment.js
index 3714eb9..411ee99 100644
--- a/contrib/views/hive20/src/main/resources/ui/config/environment.js
+++ b/contrib/views/hive20/src/main/resources/ui/config/environment.js
@@ -48,7 +48,7 @@ module.exports = function(environment) {
 
     // Change the value to false to prevent the service checks. This is required in development mode
     // as service checks take up time and hence increase the overall development time.
-    ENV.APP.SHOULD_PERFORM_SERVICE_CHECK = true;
+    ENV.APP.SHOULD_PERFORM_SERVICE_CHECK = false;
   }
 
   if (environment === 'test') {


[11/50] [abbrv] ambari git commit: AMBARI-19949. Hive View 2.0: Introduce validation for expression precision >= scale. (dipayanb)

Posted by nc...@apache.org.
AMBARI-19949. Hive View 2.0: Introduce validation for expression precision >= scale. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/232b585b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/232b585b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/232b585b

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 232b585bdd0350f5b2fb8c2c7ebf200cd3286fa7
Parents: 09e35e1
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Fri Feb 10 15:09:29 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Fri Feb 10 15:10:17 2017 +0530

----------------------------------------------------------------------
 contrib/views/hive20/src/main/resources/ui/app/models/column.js | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/232b585b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/column.js b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
index 1d9ccce..f38309b 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/column.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
@@ -87,6 +87,8 @@ let Column = Ember.Object.extend(Ember.Copyable,{
         this.get('errors').pushObject({type: 'precision', error: "Precision can only be a number"});
       } else if(this.get('precision') <= 0) {
         this.get('errors').pushObject({type: 'precision', error: "Precision can only be greater than zero"});
+      } else if(this.get('type.hasScale') && this.get('scale') && (this.get('precision') < this.get('scale'))) {
+        this.get('errors').pushObject({type: 'precision', error: "Precision can only be greater than or equal to scale"});
       }
 
     }


[23/50] [abbrv] ambari git commit: AMBARI-19904. Upgrade: Package Install stuck in Installing state forever. (stoader)

Posted by nc...@apache.org.
AMBARI-19904. Upgrade: Package Install stuck in Installing state forever. (stoader)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/33caec24
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/33caec24
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/33caec24

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 33caec24a8742666a7190f38f5ebd71c54b2a405
Parents: a0ba7fb
Author: Toader, Sebastian <st...@hortonworks.com>
Authored: Fri Feb 10 18:38:37 2017 +0100
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Fri Feb 10 18:38:37 2017 +0100

----------------------------------------------------------------------
 .../server/actionmanager/ActionDBAccessor.java  |  5 +-
 .../actionmanager/ActionDBAccessorImpl.java     |  6 +-
 .../server/actionmanager/ActionScheduler.java   | 15 ++---
 .../actionmanager/TestActionScheduler.java      | 65 +++++++++++++++-----
 4 files changed, 66 insertions(+), 25 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/33caec24/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessor.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessor.java
index 8aef70d..217fe0a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessor.java
@@ -24,6 +24,7 @@ import java.util.Map;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.RequestEntity;
 
 public interface ActionDBAccessor {
@@ -58,8 +59,10 @@ public interface ActionDBAccessor {
    * Abort all outstanding operations associated with the given request. This
    * method uses the {@link HostRoleStatus#SCHEDULED_STATES} to determine which
    * {@link HostRoleCommand} instances to abort.
+   *
+   * Returns the list of the aborted operations.
    */
-  public void abortOperation(long requestId);
+  public Collection<HostRoleCommandEntity> abortOperation(long requestId);
 
   /**
    * Mark the task as to have timed out

http://git-wip-us.apache.org/repos/asf/ambari/blob/33caec24/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
index 7837a7b..7881a4b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
@@ -202,7 +202,7 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
    * {@inheritDoc}
    */
   @Override
-  public void abortOperation(long requestId) {
+  public Collection<HostRoleCommandEntity> abortOperation(long requestId) {
     long now = System.currentTimeMillis();
 
     endRequest(requestId);
@@ -226,8 +226,10 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
 
     // no need to merge if there's nothing to merge
     if (!commands.isEmpty()) {
-      hostRoleCommandDAO.mergeAll(commands);
+      return hostRoleCommandDAO.mergeAll(commands);
     }
+
+    return Collections.emptyList();
   }
 
   /* (non-Javadoc)

http://git-wip-us.apache.org/repos/asf/ambari/blob/33caec24/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
index fa2ad4f..680c0a6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
@@ -929,15 +929,16 @@ class ActionScheduler implements Runnable {
         ExecutionCommand c = wrapper.getExecutionCommand();
         transitionToFailedState(stage.getClusterName(), c.getServiceName(),
                 c.getRole(), hostName, now, true);
-        if (c.getRoleCommand().equals(RoleCommand.ACTIONEXECUTE)) {
-          String clusterName = c.getClusterName();
-          processActionDeath(clusterName,
-                  c.getHostname(),
-                  c.getRole());
-        }
       }
     }
-    db.abortOperation(stage.getRequestId());
+    Collection<HostRoleCommandEntity> abortedOperations = db.abortOperation(stage.getRequestId());
+
+    for (HostRoleCommandEntity command: abortedOperations) {
+      if (command.getRoleCommand().equals(RoleCommand.ACTIONEXECUTE)) {
+        String clusterName = stage.getClusterName();
+        processActionDeath(clusterName, command.getHostName(), command.getRole().name());
+      }
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/33caec24/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
index 653ad2c..6519126 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
@@ -109,6 +109,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Multimap;
 import com.google.common.reflect.TypeToken;
 import com.google.inject.AbstractModule;
@@ -508,10 +509,11 @@ public class TestActionScheduler {
     when(db.getCommandsInProgressCount()).thenReturn(stages.size());
     when(db.getStagesInProgress()).thenReturn(stages);
 
-    doAnswer(new Answer<Void>() {
+    doAnswer(new Answer<Collection<HostRoleCommandEntity>>() {
       @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
+      public Collection<HostRoleCommandEntity> answer(InvocationOnMock invocation) throws Throwable {
         Long requestId = (Long) invocation.getArguments()[1];
+        List<HostRoleCommandEntity> abortedCommands = Lists.newArrayList();
         for (Stage stage : stages) {
           if (requestId.equals(stage.getRequestId())) {
             for (HostRoleCommand command : stage.getOrderedHostRoleCommands()) {
@@ -519,12 +521,17 @@ public class TestActionScheduler {
                 command.getStatus() == HostRoleStatus.IN_PROGRESS ||
                 command.getStatus() == HostRoleStatus.PENDING) {
                 command.setStatus(HostRoleStatus.ABORTED);
+
+                HostRoleCommandEntity hostRoleCommandEntity = command.constructNewPersistenceEntity();
+                hostRoleCommandEntity.setStage(stage.constructNewPersistenceEntity());
+
+                abortedCommands.add(hostRoleCommandEntity);
               }
             }
           }
         }
 
-        return null;
+        return abortedCommands;
       }
     }).when(db).abortHostRole(anyString(), anyLong(), anyLong(), anyString(), anyString());
 
@@ -1390,10 +1397,12 @@ public class TestActionScheduler {
         return null;
       }
     });
-    doAnswer(new Answer<Void>() {
+    doAnswer(new Answer<Collection<HostRoleCommandEntity>>() {
       @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
+      public Collection<HostRoleCommandEntity> answer(InvocationOnMock invocation) throws Throwable {
         Long requestId = (Long) invocation.getArguments()[0];
+        List<HostRoleCommandEntity> abortedCommands = Lists.newArrayList();
+
         for (Stage stage : stages) {
           if (requestId.equals(stage.getRequestId())) {
             for (HostRoleCommand command : stage.getOrderedHostRoleCommands()) {
@@ -1401,12 +1410,17 @@ public class TestActionScheduler {
                   command.getStatus() == HostRoleStatus.IN_PROGRESS ||
                   command.getStatus() == HostRoleStatus.PENDING) {
                 command.setStatus(HostRoleStatus.ABORTED);
+
+                HostRoleCommandEntity hostRoleCommandEntity = command.constructNewPersistenceEntity();
+                hostRoleCommandEntity.setStage(stage.constructNewPersistenceEntity());
+
+                abortedCommands.add(hostRoleCommandEntity);
               }
             }
           }
         }
 
-        return null;
+        return abortedCommands;
       }
     }).when(db).abortOperation(anyLong());
 
@@ -1570,10 +1584,12 @@ public class TestActionScheduler {
         return null;
       }
     });
-    doAnswer(new Answer<Void>() {
+    doAnswer(new Answer<Collection<HostRoleCommandEntity>>() {
       @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
+      public Collection<HostRoleCommandEntity> answer(InvocationOnMock invocation) throws Throwable {
         Long requestId = (Long) invocation.getArguments()[0];
+        List<HostRoleCommandEntity> abortedCommands = Lists.newArrayList();
+
         for (Stage stage : stages) {
           if (requestId.equals(stage.getRequestId())) {
             for (HostRoleCommand command : stage.getOrderedHostRoleCommands()) {
@@ -1581,12 +1597,17 @@ public class TestActionScheduler {
                   command.getStatus() == HostRoleStatus.IN_PROGRESS ||
                   command.getStatus() == HostRoleStatus.PENDING) {
                 command.setStatus(HostRoleStatus.ABORTED);
+
+                HostRoleCommandEntity hostRoleCommandEntity = command.constructNewPersistenceEntity();
+                hostRoleCommandEntity.setStage(stage.constructNewPersistenceEntity());
+
+                abortedCommands.add(hostRoleCommandEntity);
               }
             }
           }
         }
 
-        return null;
+        return abortedCommands;
       }
     }).when(db).abortOperation(anyLong());
 
@@ -1755,10 +1776,12 @@ public class TestActionScheduler {
         return null;
       }
     });
-    doAnswer(new Answer<Void>() {
+    doAnswer(new Answer<Collection<HostRoleCommandEntity>>() {
       @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
+      public Collection<HostRoleCommandEntity> answer(InvocationOnMock invocation) throws Throwable {
         Long requestId = (Long) invocation.getArguments()[0];
+        List<HostRoleCommandEntity> abortedCommands = Lists.newArrayList();
+
         for (Stage stage : stages) {
           if (requestId.equals(stage.getRequestId())) {
             for (HostRoleCommand command : stage.getOrderedHostRoleCommands()) {
@@ -1766,12 +1789,17 @@ public class TestActionScheduler {
                   command.getStatus() == HostRoleStatus.IN_PROGRESS ||
                   command.getStatus() == HostRoleStatus.PENDING) {
                 command.setStatus(HostRoleStatus.ABORTED);
+
+                HostRoleCommandEntity hostRoleCommandEntity = command.constructNewPersistenceEntity();
+                hostRoleCommandEntity.setStage(stage.constructNewPersistenceEntity());
+
+                abortedCommands.add(hostRoleCommandEntity);
               }
             }
           }
         }
 
-        return null;
+        return abortedCommands;
       }
     }).when(db).abortOperation(anyLong());
 
@@ -2316,10 +2344,12 @@ public class TestActionScheduler {
       }
     });
 
-    doAnswer(new Answer<Void>() {
+    doAnswer(new Answer<Collection<HostRoleCommandEntity>>() {
       @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
+      public Collection<HostRoleCommandEntity> answer(InvocationOnMock invocation) throws Throwable {
         Long requestId = (Long) invocation.getArguments()[0];
+        List<HostRoleCommandEntity> abortedCommands = Lists.newArrayList();
+
         for (Stage stage : stagesInProgress) {
           if (requestId.equals(stage.getRequestId())) {
             for (HostRoleCommand command : stage.getOrderedHostRoleCommands()) {
@@ -2327,12 +2357,17 @@ public class TestActionScheduler {
                       command.getStatus() == HostRoleStatus.IN_PROGRESS ||
                       command.getStatus() == HostRoleStatus.PENDING) {
                 command.setStatus(HostRoleStatus.ABORTED);
+
+                HostRoleCommandEntity hostRoleCommandEntity = command.constructNewPersistenceEntity();
+                hostRoleCommandEntity.setStage(stage.constructNewPersistenceEntity());
+
+                abortedCommands.add(hostRoleCommandEntity);
               }
             }
           }
         }
 
-        return null;
+        return abortedCommands;
       }
     }).when(db).abortOperation(anyLong());
 


[38/50] [abbrv] ambari git commit: AMBARI-19970 : AMS graphs are not present on cluster with SSL. (avijayan)

Posted by nc...@apache.org.
AMBARI-19970 : AMS graphs are not present on cluster with SSL. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6eac0f57
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6eac0f57
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6eac0f57

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 6eac0f57a61da4e76b175f0de43ab315a665b9c2
Parents: d510063
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Sun Feb 12 08:34:41 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Sun Feb 12 08:34:58 2017 -0800

----------------------------------------------------------------------
 .../conf/hadoop-metrics2-hbase.properties.j2    | 49 -----------------
 .../src/main/conf/hadoop-metrics2.properties.j2 | 58 --------------------
 .../hadoop-metrics2-hbase.properties.j2         |  4 ++
 3 files changed, 4 insertions(+), 107 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6eac0f57/ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2-hbase.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2-hbase.properties.j2 b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2-hbase.properties.j2
deleted file mode 100644
index c819301..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2-hbase.properties.j2
+++ /dev/null
@@ -1,49 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# See http://wiki.apache.org/hadoop/GangliaMetrics
-#
-# Make sure you know whether you are using ganglia 3.0 or 3.1.
-# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
-# And, yes, this file is named hadoop-metrics.properties rather than
-# hbase-metrics.properties because we're leveraging the hadoop metrics
-# package and hadoop-metrics.properties is an hardcoded-name, at least
-# for the moment.
-#
-# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
-
-# HBase-specific configuration to reset long-running stats (e.g. compactions)
-# If this variable is left out, then the default is no expiration.
-hbase.extendedperiod = 3600
-
-# Configuration of the "hbase" context for timeline metrics service
-hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-hbase.period=10
-hbase.collector={{timeline_server_hosts}}:6188
-
-# Configuration of the "jvm" context for timeline metrics service
-jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-jvm.period=10
-jvm.collector={{timeline_server_hosts}}:6188
-
-# Configuration of the "rpc" context for timeline metrics service
-rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-rpc.period=10
-rpc.collector={{timeline_server_hosts}}:6188
-
-# Following hadoop example
-hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-hbase.sink.timeline.period=10
-hbase.sink.timeline.collector=http://{{timeline_server_hosts}}:6188
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6eac0f57/ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2.properties.j2 b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2.properties.j2
deleted file mode 100644
index ec1377d..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2.properties.j2
+++ /dev/null
@@ -1,58 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# syntax: [prefix].[source|sink|jmx].[instance].[options]
-# See package.html for org.apache.hadoop.metrics2 for details
-
-{% if has_ganglia_server %}
-*.period=60
-
-*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-*.sink.timeline.period=10
-
-
-# Hook up to the server
-datanode.sink.timeline.collector=http://localhost:6188
-namenode.sink.timeline.collector=http://localhost:6188
-resourcemanager.sink.timeline.collector=http://localhost:6188
-nodemanager.sink.timeline.collector=http://localhost:6188
-historyserver.sink.timeline.collector=http://localhost:6188
-journalnode.sink.timeline.collector=http://localhost:6188
-nimbus.sink.timeline.collector=http://localhost:6188
-supervisor.sink.timeline.collector=http://localhost:6188
-maptask.sink.timeline.collector=http://localhost:6188
-reducetask.sink.timeline.collector=http://localhost:6188
-
-resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
-
-{% endif %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/6eac0f57/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
index 8c20f2b..8c6f86f 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
@@ -42,16 +42,19 @@ hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.period=30
 hbase.collector.hosts={{ams_collector_hosts}}
 hbase.port={{metric_collector_port}}
+hbase.protocol={{metric_collector_protocol}}
 
 jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 jvm.period=30
 jvm.collector.hosts={{ams_collector_hosts}}
 jvm.port={{metric_collector_port}}
+jvm.protocol={{metric_collector_protocol}}
 
 rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 rpc.period=30
 rpc.collector.hosts={{ams_collector_hosts}}
 rpc.port={{metric_collector_port}}
+rpc.protocol={{metric_collector_protocol}}
 
 *.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
 *.sink.timeline.slave.host.name={{hostname}}
@@ -60,6 +63,7 @@ hbase.sink.timeline.period={{metrics_collection_period}}
 hbase.sink.timeline.sendInterval={{metrics_report_interval}}000
 hbase.sink.timeline.collector.hosts={{ams_collector_hosts}}
 hbase.sink.timeline.port={{metric_collector_port}}
+hbase.sink.timeline.protocol={{metric_collector_protocol}}
 hbase.sink.timeline.serviceName-prefix=ams
 
 # HTTPS properties


[22/50] [abbrv] ambari git commit: AMBARI-19577. Add Livy session recovery configurations in Ambari (Saisai Shao via smohanty)

Posted by nc...@apache.org.
AMBARI-19577. Add Livy session recovery configurations in Ambari (Saisai Shao via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a0ba7fbe
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a0ba7fbe
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a0ba7fbe

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: a0ba7fbe7f80332bcff1cab9ea7dbe27ca59a003
Parents: eecb5a3
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Fri Feb 10 08:36:58 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Fri Feb 10 08:42:42 2017 -0800

----------------------------------------------------------------------
 .../SPARK/1.2.1/package/scripts/params.py       |  1 +
 .../SPARK/1.2.1/package/scripts/setup_livy.py   |  8 +++++
 .../SPARK2/2.0.0/package/scripts/params.py      |  1 +
 .../SPARK2/2.0.0/package/scripts/setup_livy2.py |  8 +++++
 .../services/SPARK/configuration/livy-conf.xml  | 24 +++++++++++++++
 .../SPARK2/configuration/livy2-conf.xml         | 24 +++++++++++++++
 .../python/stacks/2.5/SPARK/test_spark_livy.py  | 31 ++++++++++++++++++++
 .../stacks/2.6/SPARK2/test_spark_livy2.py       | 31 ++++++++++++++++++++
 8 files changed, 128 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a0ba7fbe/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
index 82cd0b2..6a59caf 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
@@ -222,6 +222,7 @@ if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stac
   user_group = status_params.user_group
   livy_hdfs_user_dir = format("/user/{livy_user}")
   livy_server_pid_file = status_params.livy_server_pid_file
+  livy_recovery_dir = default("/configurations/livy-conf/livy.server.recovery.state-store.url", "/livy-recovery")
 
   livy_server_start = format("{livy_home}/bin/livy-server start")
   livy_server_stop = format("{livy_home}/bin/livy-server stop")

http://git-wip-us.apache.org/repos/asf/ambari/blob/a0ba7fbe/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py
index 07b704f..32615c3 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py
@@ -40,6 +40,14 @@ def setup_livy(env, type, upgrade_type = None, action = None):
     )
     params.HdfsResource(None, action="execute")
 
+    params.HdfsResource(params.livy_recovery_dir,
+                       type="directory",
+                       action="create_on_execute",
+                       owner=params.livy_user,
+                       mode=0775
+    )
+    params.HdfsResource(None, action="execute")
+
   # create livy-env.sh in etc/conf dir
   File(os.path.join(params.livy_conf, 'livy-env.sh'),
        owner=params.livy_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/a0ba7fbe/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
index c19f88c..b6889e4 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
@@ -207,6 +207,7 @@ if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, sta
   user_group = status_params.user_group
   livy2_hdfs_user_dir = format("/user/{livy2_user}")
   livy2_server_pid_file = status_params.livy2_server_pid_file
+  livy2_recovery_dir = default("/configurations/livy2-conf/livy.server.recovery.state-store.url", "/livy2-recovery")
 
   livy2_server_start = format("{livy2_home}/bin/livy-server start")
   livy2_server_stop = format("{livy2_home}/bin/livy-server stop")

http://git-wip-us.apache.org/repos/asf/ambari/blob/a0ba7fbe/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_livy2.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_livy2.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_livy2.py
index f2178f7..2e92509 100644
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_livy2.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_livy2.py
@@ -40,6 +40,14 @@ def setup_livy(env, type, upgrade_type = None, action = None):
     )
     params.HdfsResource(None, action="execute")
 
+    params.HdfsResource(params.livy2_recovery_dir,
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.livy2_user,
+                        mode=0775
+       )
+    params.HdfsResource(None, action="execute")
+
   # create livy-env.sh in etc/conf dir
   File(os.path.join(params.livy2_conf, 'livy-env.sh'),
        owner=params.livy2_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/a0ba7fbe/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/configuration/livy-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/configuration/livy-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/configuration/livy-conf.xml
index 23583c0..28eeab5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/configuration/livy-conf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK/configuration/livy-conf.xml
@@ -78,4 +78,28 @@
      </description>
      <on-ambari-upgrade add="true"/>
   </property>
+  <property>
+    <name>livy.server.recovery.mode</name>
+    <value>recovery</value>
+    <description>
+          Recovery mode for livy, either be "off" or "recovery".
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>livy.server.recovery.state-store</name>
+    <value>filesystem</value>
+    <description>
+          Where Livy should store state for recovery.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>livy.server.recovery.state-store.url</name>
+    <value>/livy-recovery</value>
+    <description>
+          Where Livy should store state for recovery.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a0ba7fbe/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
index 231ef70..c8a65bd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
@@ -85,6 +85,30 @@
             Whether to enable HiveContext in livy interpreter
         </description>
         <on-ambari-upgrade add="false"/>
    </property>
+    <property>
+        <name>livy.server.recovery.mode</name>
+        <value>recovery</value>
+        <description>
+            Recovery mode for livy, either be "off" or "recovery".
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.recovery.state-store</name>
+        <value>filesystem</value>
+        <description>
+            Where Livy should store state for recovery.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.recovery.state-store.url</name>
+        <value>/livy2-recovery</value>
+        <description>
+            Where Livy should store state for recovery.
+        </description>
+        <on-ambari-upgrade add="false"/>
     </property>
 </configuration>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a0ba7fbe/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py b/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
index b56474a..b9199c7 100644
--- a/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
+++ b/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
@@ -84,6 +84,37 @@ class TestSparkClient(RMFTestCase):
                                   dfs_type = '',
                                   hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
                                   )
+        self.assertResourceCalled('HdfsResource', '/livy-recovery',
+                                  immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+                                  security_enabled = False,
+                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  keytab = UnknownConfigurationMock(),
+                                  default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                                  hdfs_site = {u'a': u'b'},
+                                  kinit_path_local = '/usr/bin/kinit',
+                                  principal_name = UnknownConfigurationMock(),
+                                  user = 'hdfs',
+                                  owner = 'livy',
+                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  type = 'directory',
+                                  action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                                  dfs_type = '',
+                                  mode = 0775,
+                                  )
+        self.assertResourceCalled('HdfsResource', None,
+                                  immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+                                  security_enabled = False,
+                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  keytab = UnknownConfigurationMock(),
+                                  default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                                  hdfs_site = {u'a': u'b'},
+                                  kinit_path_local = '/usr/bin/kinit',
+                                  principal_name = UnknownConfigurationMock(),
+                                  user = 'hdfs',
+                                  action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                                  dfs_type = '',
+                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  )
         self.assertResourceCalled('File', '/usr/hdp/current/livy-server/conf/livy-env.sh',
                                   content = InlineTemplate(self.getConfig()['configurations']['livy-env']['content']),
                                   owner = 'livy',

http://git-wip-us.apache.org/repos/asf/ambari/blob/a0ba7fbe/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py b/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
index 6244d87..75aec84 100644
--- a/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
+++ b/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
@@ -84,6 +84,37 @@ class TestSparkClient(RMFTestCase):
                                   dfs_type = '',
                                   hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
                                   )
+        self.assertResourceCalled('HdfsResource', '/livy2-recovery',
+                                  immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+                                  security_enabled = False,
+                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  keytab = UnknownConfigurationMock(),
+                                  default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                                  hdfs_site = {u'a': u'b'},
+                                  kinit_path_local = '/usr/bin/kinit',
+                                  principal_name = UnknownConfigurationMock(),
+                                  user = 'hdfs',
+                                  owner = 'livy',
+                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  type = 'directory',
+                                  action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                                  dfs_type = '',
+                                  mode = 0775,
+                                  )
+        self.assertResourceCalled('HdfsResource', None,
+                                  immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+                                  security_enabled = False,
+                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  keytab = UnknownConfigurationMock(),
+                                  default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                                  hdfs_site = {u'a': u'b'},
+                                  kinit_path_local = '/usr/bin/kinit',
+                                  principal_name = UnknownConfigurationMock(),
+                                  user = 'hdfs',
+                                  action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                                  dfs_type = '',
+                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  )
         self.assertResourceCalled('File', '/usr/hdp/current/livy2-server/conf/livy-env.sh',
                                   content = InlineTemplate(self.getConfig()['configurations']['livy2-env']['content']),
                                   owner = 'livy',


[37/50] [abbrv] ambari git commit: AMBARI-19884. Set a higher value of num aggregated files per container - HDP stack, YARN (Siddharth Seth via smohanty)

Posted by nc...@apache.org.
AMBARI-19884. Set a higher value of num aggregated files per container - HDP stack, YARN (Siddharth Seth via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d5100632
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d5100632
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d5100632

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d5100632624a671512ea44e6ad8f6ee9df3dba24
Parents: d6e0b26
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Sat Feb 11 16:09:15 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Sat Feb 11 16:09:48 2017 -0800

----------------------------------------------------------------------
 .../HDP/2.5/services/YARN/configuration/yarn-site.xml       | 6 ++++++
 .../resources/stacks/HDP/2.5/upgrades/config-upgrade.xml    | 5 +++++
 .../stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml      | 9 ++++++++-
 .../main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml  | 1 +
 4 files changed, 20 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d5100632/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml
index 90b2243..b6fadcb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/configuration/yarn-site.xml
@@ -47,6 +47,12 @@
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
+    <name>yarn.nodemanager.log-aggregation.num-log-files-per-app</name>
+    <value>336</value>
+    <description>The number of files to retain per container when continuous log aggregation is used</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
     <name>yarn.nodemanager.container-metrics.unregister-delay-ms</name>
     <value>60000</value>
     <description>The delay time ms to unregister container metrics after completion.</description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5100632/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index 100df8f..15837df 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -146,6 +146,11 @@
                     to-key="yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled"
                     default-value="false"/>
         </definition>
+        <definition xsi:type="configure" id="yarn_site_retained_log_count" summary="Updating Yarn retained file count for continuous Log Aggregation">
+          <type>yarn-site</type>
+          <set key="yarn.nodemanager.log-aggregation.num-log-files-per-app"
+               value="336" />
+        </definition>
       </changes>
     </component>
   </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5100632/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index 6e92141..8c7a9b1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -299,12 +299,19 @@
       </execute-stage>
 
       <!--Yarn-->
-      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Resource Manager">
+      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for underutilized_preemption">
         <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption">
           <summary>Updating underutilized_preemption setting</summary>
         </task>
       </execute-stage>
 
+      <!--Yarn-->
      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for YARN app log retention">
+        <task xsi:type="configure" id="yarn_site_retained_log_count">
+          <summary>Updating log aggregation retained files setting</summary>
+        </task>
+      </execute-stage>
+
       <!--TEZ-->
       <execute-stage service="TEZ" component="TEZ_CLIENT" title="Verify LZO codec path for Tez">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5100632/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index bc68754..b83525a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -670,6 +670,7 @@
           <task xsi:type="configure" id="yarn_log4j_parameterize" />
           <task xsi:type="configure" id="yarn_env_security_opts" />
           <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption" />
+          <task xsi:type="configure" id="yarn_site_retained_log_count" />
         </pre-upgrade>
         <pre-downgrade />
         <upgrade>


[33/50] [abbrv] ambari git commit: AMBARI-19975. Hive2. Visual Explain -Show additional info when you click on a box, i.e. can drill down to get more info on operators. (pallavkul)

Posted by nc...@apache.org.
AMBARI-19975. Hive2. Visual Explain -Show additional info when you click on a box, i.e. can drill down to get more info on operators. (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1d1253a6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1d1253a6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1d1253a6

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 1d1253a6471113a78dd22faf453439ab13a19e48
Parents: 2364295
Author: pallavkul <pa...@gmail.com>
Authored: Sat Feb 11 17:06:43 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Sat Feb 11 17:06:43 2017 +0530

----------------------------------------------------------------------
 .../ui/app/components/visual-explain-detail.js  | 31 +++++++++++++
 .../ui/app/components/visual-explain.js         | 26 ++++++++++-
 .../resources/ui/app/routes/queries/query.js    | 13 ++++--
 .../src/main/resources/ui/app/styles/app.scss   | 49 ++++++++++++++++++++
 .../components/visual-explain-detail.hbs        | 29 ++++++++++++
 .../app/templates/components/visual-explain.hbs |  5 ++
 6 files changed, 148 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1d1253a6/contrib/views/hive20/src/main/resources/ui/app/components/visual-explain-detail.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/visual-explain-detail.js b/contrib/views/hive20/src/main/resources/ui/app/components/visual-explain-detail.js
new file mode 100644
index 0000000..2c9ba00
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/visual-explain-detail.js
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+
+  classNames:['visual-explain-detail-container'],
+
+  actions:{
+    closeModal(){
+      this.sendAction('closeModal');
+    }
+  }
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d1253a6/contrib/views/hive20/src/main/resources/ui/app/components/visual-explain.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/visual-explain.js b/contrib/views/hive20/src/main/resources/ui/app/components/visual-explain.js
index 6551974..6805bb8 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/visual-explain.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/visual-explain.js
@@ -20,8 +20,13 @@ import Ember from 'ember';
 import explain from '../utils/hive-explainer';
 
 export default Ember.Component.extend({
+
   visualExplainJson:'',
 
+  showDetailsModal: false,
+
+  explainDetailData: '',
+
   visualExplainInput: Ember.computed('visualExplainJson', function () {
     return this.get('visualExplainJson');
   }),
@@ -39,6 +44,7 @@ export default Ember.Component.extend({
       .attr('height', height);
 
     const container = svg.append('g');
+
     const zoom =
       d3.zoom()
         .scaleExtent([1 / 10, 4])
@@ -49,16 +55,34 @@ export default Ember.Component.extend({
       svg
         .call(zoom);
 
-    const onRequestDetail = data => this.sendAction('showStepDetail', data);
+    const onRequestDetail = data => this.set('explainDetailData', JSON.stringify( data, null, '  ') );
 
     explain(JSON.parse(this.get('visualExplainInput')), svg, container, zoom, onRequestDetail);
 
   },
 
+  click(event){
+
+    if(this.get('explainDetailData') === ''){
+      return;
+    }
+
+    Ember.run.later(() => {
+      this.set('showDetailsModal', true);
+    }, 100);
+  },
+
   actions:{
     expandQueryResultPanel(){
       this.sendAction('expandQueryResultPanel');
+    },
+
+    closeModal(){
+      this.set('showDetailsModal', false);
+      this.set('explainDetailData', '');
+      false;
     }
+
   }
 
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d1253a6/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 4f60229..72682f5 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -221,11 +221,8 @@ export default Ember.Route.extend({
           .then((status) => {
 
               self.get('controller').set('isJobSuccess', true);
-              self.send('getJob', data);
 
-              if(isVisualExplainQuery){
-                self.send('showVisualExplain');
-              }
+              self.send('getJob', data);
 
               //Last log
               self.send('fetchLogs');
@@ -290,6 +287,8 @@ export default Ember.Route.extend({
       var self = this;
       var data = data;
 
+      let isVisualExplainQuery = this.get('controller').get('isVisualExplainQuery');
+
       let jobId = data.job.id;
       let dateSubmitted = data.job.dateSubmitted;
 
@@ -312,6 +311,12 @@ export default Ember.Route.extend({
         self.get('controller.model').set('previousPage', previousPage + 1 );
         self.get('controller.model').set('nextPage', nextPage + 1);
 
+        if(isVisualExplainQuery){
+          Ember.run.later(() => {
+            self.send('showVisualExplain');
+          }, 500);
+        }
+
       }, function(reason) {
         // on rejection
         console.log('reason' , reason);

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d1253a6/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index 1dc86d7..6469b2e 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -318,6 +318,7 @@ pre {
 
 .query-editor-results {
   padding-right: 15px;
+  position: relative;
 }
 
 .query-result-table {
@@ -980,3 +981,51 @@ ul.dropdown-menu {
 .break-word{
   word-break: break-all;
 }
+
+.visual-explain-detail-container {
+  width: 500px;
+  position: absolute;
+  top: 50px;
+  right: 0;
+  background-color: #FFF;
+  max-height: 70vh;
+  overflow-y: auto;
+}
+
+.visual-explain-detail{
+  border: 1px solid #DDD;
+  .close{
+    margin: 10px 15px;
+    z-index: 9999;
+  }
+
+  .header{
+    border-bottom: 1px solid #DDD; padding: 10px 0;
+    .icon{
+      padding:10px;
+    }
+    .join-type {
+      font-size: 20px; font-weight: bold
+    }
+  }
+
+  .vector-info{
+    border: 1px solid #DDD; padding:20px; background-color:#ED6265; color:#FFF; font-weight: bold; margin: 10px 0;
+  }
+  .block{
+    border: 1px solid #DDD; padding:20px; margin: 10px 0;
+
+    .block-header{
+      font-size: 20px; font-weight: bold;
+    }
+    .block-body{
+      padding: 10px 0;
+    }
+
+    .divider{
+      height: 1px; border-top:1px solid #DDD; margin: 10px 0;
+    }
+  }
+
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d1253a6/contrib/views/hive20/src/main/resources/ui/app/templates/components/visual-explain-detail.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/visual-explain-detail.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/visual-explain-detail.hbs
new file mode 100644
index 0000000..3df8e94
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/visual-explain-detail.hbs
@@ -0,0 +1,29 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="clearfix visual-explain-detail">
+  <div class="pull-right close">  <a href="javascript:void(0)" {{action "closeModal"}}>{{fa-icon "close"}}</a></div>
+  <div class="clearfix header" >
+  </div>
+  <div class="col-md-12">
+    <div>&nbsp;</div>
+    <pre class="prettyprint">{{explainDetailData}}</pre>
+  </div>
+</div>
+
+{{yield}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d1253a6/contrib/views/hive20/src/main/resources/ui/app/templates/components/visual-explain.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/visual-explain.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/visual-explain.hbs
index 4238d43..e0ceaa2 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/visual-explain.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/visual-explain.hbs
@@ -34,4 +34,9 @@
   <div id="explain-container" ></div>
 {{/unless}}
 
+{{#if showDetailsModal}}
+  {{visual-explain-detail closeModal='closeModal' explainDetailData=explainDetailData}}
+{{/if}}
+
+
 {{yield}}


[05/50] [abbrv] ambari git commit: AMBARI-19928. Solr grafana dashboards. (Willy Solaligue via yusaku)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/00ed4159/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-solr-hosts-dashboard.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-solr-hosts-dashboard.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-solr-hosts-dashboard.json
new file mode 100644
index 0000000..6ae1b22
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-solr-hosts-dashboard.json
@@ -0,0 +1,538 @@
+{
+  "id": null,
+  "title": "Solr - Hosts",
+  "originalTitle": "Solr - Hosts",
+  "tags": ["solr"],
+  "style": "dark",
+  "timezone": "browser",
+  "editable": true,
+  "hideControls": false,
+  "sharedCrosshair": false,
+  "rows": [
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "content": "<h4 align=\"center\">Solr Hosts Dashboard</h4>",
+          "editable": true,
+          "error": false,
+          "height": "25px",
+          "id": 3,
+          "isNew": true,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "style": {},
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "title": "Row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 1,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "repeat": null,
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "app": "solr-host-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hide": false,
+              "metric": "solr.admin.info.system.processCpuLoad",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Solr CPU Utilization",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "percentunit",
+            "percent"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 2,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "app": "solr-host-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.info.jvm.memory.used",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Solr Memory Utilization",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "bytes"
+          ]
+        }
+      ],
+      "title": "New row"
+    }
+  ],
+  "time": {
+    "from": "now-5m",
+    "to": "now"
+  },
+  "timepicker": {
+    "now": true,
+    "refresh_intervals": [
+      "5s",
+      "10s",
+      "30s",
+      "1m",
+      "5m",
+      "15m",
+      "30m",
+      "1h",
+      "2h",
+      "1d"
+    ],
+    "time_options": [
+      "5m",
+      "15m",
+      "1h",
+      "6h",
+      "12h",
+      "24h",
+      "2d",
+      "7d",
+      "30d"
+    ]
+  },
+  "templating": {
+    "list": [
+      {
+        "allFormat": "glob",
+        "current": {
+          "text": "solr-host-app",
+          "value": "solr-host-app"
+        },
+        "datasource": null,
+        "includeAll": false,
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "components",
+        "options": [
+          {
+            "selected": true,
+            "text": "solr-host-app",
+            "value": "solr-host-app"
+          }
+        ],
+        "query": "solr-host-app",
+        "refresh": false,
+        "regex": "",
+        "type": "custom"
+      },
+      {
+        "allFormat": "glob",
+        "current": {
+        },
+        "datasource": null,
+        "includeAll": false,
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "hosts",
+        "options": [
+
+        ],
+        "query": "hosts",
+        "refresh": true,
+        "regex": "",
+        "type": "query"
+      }
+    ]
+  },
+  "annotations": {
+    "list": []
+  },
+  "refresh": false,
+  "schemaVersion": 8,
+  "version": 9,
+  "links": []
+}
+{
+  "id": null,
+  "title": "Solr Hosts Dashboard",
+  "originalTitle": "Solr Hosts Dashboard",
+  "tags": ["solr"],
+  "style": "dark",
+  "timezone": "browser",
+  "editable": true,
+  "hideControls": false,
+  "sharedCrosshair": false,
+  "rows": [
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "content": "<h4 align=\"center\">Solr dashboards</h4>",
+          "editable": true,
+          "error": false,
+          "height": "25px",
+          "id": 3,
+          "isNew": true,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "style": {},
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "title": "Row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 1,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "repeat": null,
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "app": "solr-host-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hide": false,
+              "metric": "solr.admin.info.system.processCpuLoad",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Solr CPU Utilization",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "percentunit",
+            "percent"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 2,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "app": "solr-host-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.info.jvm.memory.used",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Solr Memory Utilization",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "bytes"
+          ]
+        }
+      ],
+      "title": "New row"
+    }
+  ],
+  "time": {
+    "from": "now-5m",
+    "to": "now"
+  },
+  "timepicker": {
+    "now": true,
+    "refresh_intervals": [
+      "5s",
+      "10s",
+      "30s",
+      "1m",
+      "5m",
+      "15m",
+      "30m",
+      "1h",
+      "2h",
+      "1d"
+    ],
+    "time_options": [
+      "5m",
+      "15m",
+      "1h",
+      "6h",
+      "12h",
+      "24h",
+      "2d",
+      "7d",
+      "30d"
+    ]
+  },
+  "templating": {
+    "list": [
+      {
+        "allFormat": "glob",
+        "current": {
+          "text": "solr-host-app",
+          "value": "solr-host-app"
+        },
+        "datasource": null,
+        "includeAll": false,
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "components",
+        "options": [
+          {
+            "selected": true,
+            "text": "solr-host-app",
+            "value": "solr-host-app"
+          }
+        ],
+        "query": "solr-host-app",
+        "refresh": false,
+        "regex": "",
+        "type": "custom"
+      },
+      {
+        "allFormat": "glob",
+        "current": {
+        },
+        "datasource": null,
+        "includeAll": false,
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "hosts",
+        "options": [
+
+        ],
+        "query": "hosts",
+        "refresh": true,
+        "regex": "",
+        "type": "query"
+      }
+    ]
+  },
+  "annotations": {
+    "list": []
+  },
+  "refresh": false,
+  "schemaVersion": 8,
+  "version": 9,
+  "links": []
+}
\ No newline at end of file


[29/50] [abbrv] ambari git commit: AMBARI-19886. Update ambari managed llap queue to set preemption policy. (Siddharth Seth via Swapan Shridhar).

Posted by nc...@apache.org.
AMBARI-19886. Update ambari managed llap queue to set preemption policy. (Siddharth Seth via Swapan Shridhar).


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e7d0e781
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e7d0e781
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e7d0e781

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: e7d0e781b3506d20806f521b30ce4629e67641f5
Parents: f4c8384
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Fri Feb 10 16:10:19 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Fri Feb 10 16:10:25 2017 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.5/services/stack_advisor.py    | 15 +++++++-
 .../stacks/2.5/common/test_stack_advisor.py     | 36 +++++++++++++++++---
 2 files changed, 45 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d0e781/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 9fe8fc3..4de9a41 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -1532,13 +1532,18 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
               elif prop == 'yarn.scheduler.capacity.root.default.maximum-capacity':
                 updated_cap_sched_configs_str = updated_cap_sched_configs_str \
                                             + prop + "=" + adjusted_default_queue_cap + "\n"
+              elif prop == 'yarn.scheduler.capacity.root.ordering-policy':
+                # Don't put this in again. We're re-writing the llap section.
+                pass
               elif prop.startswith('yarn.') and '.llap.' not in prop:
                 updated_cap_sched_configs_str = updated_cap_sched_configs_str + prop + "=" + val + "\n"
 
           # Now, append the 'llap' queue related properties
-          updated_cap_sched_configs_str += """yarn.scheduler.capacity.root.{0}.user-limit-factor=1
+          updated_cap_sched_configs_str += """yarn.scheduler.capacity.root.ordering-policy=priority-utilization
+yarn.scheduler.capacity.root.{0}.user-limit-factor=1
 yarn.scheduler.capacity.root.{0}.state=RUNNING
 yarn.scheduler.capacity.root.{0}.ordering-policy=fifo
+yarn.scheduler.capacity.root.{0}.priority=10
 yarn.scheduler.capacity.root.{0}.minimum-user-limit-percent=100
 yarn.scheduler.capacity.root.{0}.maximum-capacity={1}
 yarn.scheduler.capacity.root.{0}.capacity={1}
@@ -1560,13 +1565,18 @@ yarn.scheduler.capacity.root.{0}.maximum-am-resource-percent=1""".format(llap_qu
                 putCapSchedProperty(prop, adjusted_default_queue_cap)
               elif prop == 'yarn.scheduler.capacity.root.default.maximum-capacity':
                 putCapSchedProperty(prop, adjusted_default_queue_cap)
+              elif prop == 'yarn.scheduler.capacity.root.ordering-policy':
+                # Don't put this in again. We're re-writing the llap section.
+                pass
               elif prop.startswith('yarn.') and '.llap.' not in prop:
                 putCapSchedProperty(prop, val)
 
           # Add new 'llap' queue related configs.
+          putCapSchedProperty("yarn.scheduler.capacity.root.ordering-policy", "priority-utilization")
           putCapSchedProperty("yarn.scheduler.capacity.root." + llap_queue_name + ".user-limit-factor", "1")
           putCapSchedProperty("yarn.scheduler.capacity.root." + llap_queue_name + ".state", "RUNNING")
           putCapSchedProperty("yarn.scheduler.capacity.root." + llap_queue_name + ".ordering-policy", "fifo")
+          putCapSchedProperty("yarn.scheduler.capacity.root." + llap_queue_name + ".priority", "10")
           putCapSchedProperty("yarn.scheduler.capacity.root." + llap_queue_name + ".minimum-user-limit-percent", "100")
           putCapSchedProperty("yarn.scheduler.capacity.root." + llap_queue_name + ".maximum-capacity", llap_queue_cap_perc)
           putCapSchedProperty("yarn.scheduler.capacity.root." + llap_queue_name + ".capacity", llap_queue_cap_perc)
@@ -1634,6 +1644,9 @@ yarn.scheduler.capacity.root.{0}.maximum-am-resource-percent=1""".format(llap_qu
                 # Set 'default' max. capacity back to maximum val
                 updated_default_queue_configs = updated_default_queue_configs \
                                             + prop + "="+DEFAULT_MAX_CAPACITY + "\n"
+              elif prop == 'yarn.scheduler.capacity.root.ordering-policy':
+                # Don't set this property. The default will be picked up.
+                pass
               elif prop.startswith('yarn.'):
                 updated_default_queue_configs = updated_default_queue_configs + prop + "=" + val + "\n"
             else: # Update 'llap' related configs in 'updated_llap_queue_configs'

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d0e781/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index ef83bca..176dd99 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -158,9 +158,11 @@ class TestHDP25StackAdvisor(TestCase):
                               'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                               'yarn.scheduler.capacity.node-locality-delay=40\n'
                               'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                              'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                               'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                               'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                               'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                              'yarn.scheduler.capacity.root.llap.priority=10\n'
                               'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
 
                               'yarn.scheduler.capacity.root.llap.maximum-capacity=20\n'
@@ -188,9 +190,11 @@ class TestHDP25StackAdvisor(TestCase):
                               'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                               'yarn.scheduler.capacity.node-locality-delay=40\n'
                               'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                              'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                               'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                               'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                               'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                              'yarn.scheduler.capacity.root.llap.priority=10\n'
                               'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                               'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                               'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -220,6 +224,7 @@ class TestHDP25StackAdvisor(TestCase):
                               'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                               'yarn.scheduler.capacity.root.llap.state=STOPPED\n'
                               'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                              'yarn.scheduler.capacity.root.llap.priority=10\n'
                               'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                               'yarn.scheduler.capacity.root.llap.maximum-capacity=0\n'
                               'yarn.scheduler.capacity.root.llap.capacity=0\n'
@@ -749,9 +754,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -922,6 +929,7 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=STOPPED\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -1092,9 +1100,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=20\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=20\n'
@@ -1292,9 +1302,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=0\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=0\n'
@@ -1486,9 +1498,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -1689,9 +1703,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -1887,9 +1903,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -1962,7 +1980,7 @@ class TestHDP25StackAdvisor(TestCase):
     self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, self.hosts)
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.server2.tez.sessions.per.default.queue'], '1')
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'minimum': '1', 'maximum': '4'})
-    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=66.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=66.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=34.0\nyarn.scheduler.capacity.root.llap.capacity=34.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
+    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=66.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=66.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.ordering-policy=priority-utilization\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.priority=10\nyarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=34.0\nyarn.scheduler.capacity.root.llap.capacity=34.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
 
     self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
     self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
@@ -2087,9 +2105,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -2360,7 +2380,7 @@ class TestHDP25StackAdvisor(TestCase):
 
     self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, self.hosts)
 
-    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=0.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=0.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.mini
 mum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=100.0\nyarn.scheduler.capacity.root.llap.capacity=100.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
+    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=0.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=0.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.ordering-policy=priority-utilization\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.roo
 t.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.priority=10\nyarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=100.0\nyarn.scheduler.capacity.root.llap.capacity=100.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4'})
 
     self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
@@ -2488,9 +2508,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -2563,7 +2585,7 @@ class TestHDP25StackAdvisor(TestCase):
 
     self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, self.hosts)
 
-    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=2.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=2.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.mini
 mum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=98.0\nyarn.scheduler.capacity.root.llap.capacity=98.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
+    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=2.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=2.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.ordering-policy=priority-utilization\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.roo
 t.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.priority=10\nyarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=98.0\nyarn.scheduler.capacity.root.llap.capacity=98.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.server2.tez.sessions.per.default.queue'], '1.0')
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4', 'minimum': '1'})
 
@@ -2688,9 +2710,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -2762,7 +2786,7 @@ class TestHDP25StackAdvisor(TestCase):
 
     self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, self.hosts)
 
-    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=80.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=80.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.mi
 nimum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=20.0\nyarn.scheduler.capacity.root.llap.capacity=20.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
+    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=80.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=80.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.ordering-policy=priority-utilization\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.r
 oot.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.priority=10\nyarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=20.0\nyarn.scheduler.capacity.root.llap.capacity=20.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.server2.tez.sessions.per.default.queue'], '1')
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4', 'minimum': '1'})
 
@@ -2886,9 +2910,11 @@ class TestHDP25StackAdvisor(TestCase):
                                   'yarn.scheduler.capacity.root.acl_administer_queue=*\n'
                                   'yarn.scheduler.capacity.node-locality-delay=40\n'
                                   'yarn.scheduler.capacity.queue-mappings-override.enable=false\n'
+                                  'yarn.scheduler.capacity.root.ordering-policy=priority-utilization\n'
                                   'yarn.scheduler.capacity.root.llap.user-limit-factor=1\n'
                                   'yarn.scheduler.capacity.root.llap.state=RUNNING\n'
                                   'yarn.scheduler.capacity.root.llap.ordering-policy=fifo\n'
+                                  'yarn.scheduler.capacity.root.llap.priority=10\n'
                                   'yarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\n'
                                   'yarn.scheduler.capacity.root.llap.maximum-capacity=40\n'
                                   'yarn.scheduler.capacity.root.llap.capacity=40\n'
@@ -2958,7 +2984,7 @@ class TestHDP25StackAdvisor(TestCase):
 
     self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, self.hosts)
 
-    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=0.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=0.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.mini
 mum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=100.0\nyarn.scheduler.capacity.root.llap.capacity=100.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
+    self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=0.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=0.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.ordering-policy=priority-utilization\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.roo
 t.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.priority=10\nyarn.scheduler.capacity.root.llap.minimum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=100.0\nyarn.scheduler.capacity.root.llap.capacity=100.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4'})
 
     self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)


[26/50] [abbrv] ambari git commit: AMBARI-19831. ADDENDUM. HDP 3.0 TP - Support changed configs and scripts for YARN/MR (alejandro)

Posted by nc...@apache.org.
AMBARI-19831. ADDENDUM. HDP 3.0 TP - Support changed configs and scripts for YARN/MR (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4381561f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4381561f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4381561f

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 4381561f92aeb91b1a2b3cafff4a860efde81dff
Parents: e8a9961
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Fri Feb 10 13:09:12 2017 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Fri Feb 10 13:16:34 2017 -0800

----------------------------------------------------------------------
 .../common-services/YARN/3.0.0.3.0/package/scripts/service.py    | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4381561f/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/service.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/service.py
index 78b2428..e0d6475 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/service.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/service.py
@@ -44,7 +44,7 @@ def service(componentName, action='start', serviceName='yarn'):
   if serviceName == 'mapreduce' and componentName == 'historyserver':
     delete_pid_file = True
     daemon = format("{mapred_bin}/mr-jobhistory-daemon.sh")
-    pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-{componentName}.pid")
+    pid_file = format("{mapred_pid_dir}/hadoop-{mapred_user}-{componentName}.pid")
     usr = params.mapred_user
     log_dir = params.mapred_log_dir
   else:
@@ -52,7 +52,7 @@ def service(componentName, action='start', serviceName='yarn'):
     # may not work correctly when stopping the service
     delete_pid_file = False
     daemon = format("{yarn_bin}/yarn-daemon.sh")
-    pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-{componentName}.pid")
+    pid_file = format("{yarn_pid_dir}/hadoop-{yarn_user}-{componentName}.pid")
     usr = params.yarn_user
     log_dir = params.yarn_log_dir
 


[12/50] [abbrv] ambari git commit: AMBARI-19950. Hive View 2.0: Restrict user to change the bucket while editing a clustered table. (dipayanb)

Posted by nc...@apache.org.
AMBARI-19950. Hive View 2.0: Restrict user to change the bucket while editing a clustered table. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/92cf5615
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/92cf5615
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/92cf5615

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 92cf561520572ac65ca6e593729bb13a3edff8e1
Parents: 232b585
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Fri Feb 10 15:13:26 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Fri Feb 10 15:14:37 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/components/table-advanced-settings.js | 5 +++++
 .../ui/app/templates/components/table-advanced-settings.hbs     | 2 +-
 2 files changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/92cf5615/contrib/views/hive20/src/main/resources/ui/app/components/table-advanced-settings.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/table-advanced-settings.js b/contrib/views/hive20/src/main/resources/ui/app/components/table-advanced-settings.js
index 99a9bb6..5e58cd8 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/table-advanced-settings.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/table-advanced-settings.js
@@ -31,6 +31,7 @@ export default Ember.Component.extend({
   errors: [],
   editMode: false,
   disableTransactionInput: false,
+  disableNumBucketsInput: false,
 
   settings: {},
 
@@ -72,6 +73,10 @@ export default Ember.Component.extend({
     if(!Ember.isEmpty(this.get('settings.transactional')) && this.get('settings.transactional') && this.get('editMode')) {
       this.set('disableTransactionInput', true);
     }
+
+    if(!Ember.isEmpty(this.get('settings.numBuckets')) && this.get('settings.numBuckets') && this.get('editMode')) {
+      this.set('disableNumBucketsInput', true);
+    }
   },
 
   locationInputObserver: Ember.observer('showLocationInput', function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/92cf5615/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-advanced-settings.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-advanced-settings.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-advanced-settings.hbs
index f7a92ce..4aca56d 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-advanced-settings.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-advanced-settings.hbs
@@ -39,7 +39,7 @@
           <label class="col-md-2 control-label">Number of buckets</label>
           <div class="col-md-6">
             <div class="{{if hasNumBucketError 'has-error'}}">
-              {{input type="number" class="form-control" value=settings.numBuckets}}
+              {{input type="number" class="form-control" value=settings.numBuckets disabled=disableNumBucketsInput}}
               {{#if hasNumBucketError}}
                 <span class="help-block">{{numBucketErrorText}}</span>
               {{/if}}


[04/50] [abbrv] ambari git commit: AMBARI-19758 : Post Ambari upgrade AMS config properties changes are marking HDFS/YARN/Hive/HBase with restart required. (avijayan)

Posted by nc...@apache.org.
AMBARI-19758 : Post Ambari upgrade AMS config properties changes are marking HDFS/YARN/Hive/HBase with restart required. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/05ce603a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/05ce603a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/05ce603a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 05ce603a9fa5a6a72af4de70cbc21208f6749384
Parents: 7abf4e6
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Thu Feb 9 13:45:57 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Thu Feb 9 13:45:57 2017 -0800

----------------------------------------------------------------------
 .../hadoop-metrics2.properties.xml              | 125 -------------------
 .../common-services/HDFS/2.1.0.2.0/metainfo.xml |   1 -
 .../2.0.6/hooks/before-START/scripts/params.py  |   4 +-
 .../scripts/shared_initialization.py            |  17 ++-
 .../hadoop-metrics2.properties.xml              | 125 +++++++++++++++++++
 .../stacks/HDP/2.6/services/HDFS/metainfo.xml   |   3 +
 .../PERF/1.0/services/FAKEHDFS/metainfo.xml     |   1 -
 7 files changed, 143 insertions(+), 133 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/05ce603a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-metrics2.properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-metrics2.properties.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-metrics2.properties.xml
deleted file mode 100644
index 6b45e84..0000000
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-metrics2.properties.xml
+++ /dev/null
@@ -1,125 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <!-- hadoop-metrics2.properties -->
-  <property>
-    <name>content</name>
-    <display-name>hadoop-metrics2.properties template</display-name>
-    <description>This is the jinja template for hadoop-metrics2.properties file</description>
-    <value>
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# syntax: [prefix].[source|sink|jmx].[instance].[options]
-# See package.html for org.apache.hadoop.metrics2 for details
-
-{% if has_ganglia_server %}
-*.period=60
-
-*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
-*.sink.ganglia.period=10
-
-# default for supportsparse is false
-*.sink.ganglia.supportsparse=true
-
-.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
-.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
-
-# Hook up to the server
-namenode.sink.ganglia.servers={{ganglia_server_host}}:8661
-datanode.sink.ganglia.servers={{ganglia_server_host}}:8659
-jobtracker.sink.ganglia.servers={{ganglia_server_host}}:8662
-tasktracker.sink.ganglia.servers={{ganglia_server_host}}:8658
-maptask.sink.ganglia.servers={{ganglia_server_host}}:8660
-reducetask.sink.ganglia.servers={{ganglia_server_host}}:8660
-resourcemanager.sink.ganglia.servers={{ganglia_server_host}}:8664
-nodemanager.sink.ganglia.servers={{ganglia_server_host}}:8657
-historyserver.sink.ganglia.servers={{ganglia_server_host}}:8666
-journalnode.sink.ganglia.servers={{ganglia_server_host}}:8654
-nimbus.sink.ganglia.servers={{ganglia_server_host}}:8649
-supervisor.sink.ganglia.servers={{ganglia_server_host}}:8650
-
-resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
-
-{% endif %}
-
-{% if has_metric_collector %}
-
-*.period={{metrics_collection_period}}
-*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
-*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-*.sink.timeline.period={{metrics_collection_period}}
-*.sink.timeline.sendInterval={{metrics_report_interval}}000
-*.sink.timeline.slave.host.name={{hostname}}
-*.sink.timeline.zookeeper.quorum={{zookeeper_quorum}}
-*.sink.timeline.protocol={{metric_collector_protocol}}
-*.sink.timeline.port={{metric_collector_port}}
-
-# HTTPS properties
-*.sink.timeline.truststore.path = {{metric_truststore_path}}
-*.sink.timeline.truststore.type = {{metric_truststore_type}}
-*.sink.timeline.truststore.password = {{metric_truststore_password}}
-
-datanode.sink.timeline.collector.hosts={{ams_collector_hosts}}
-namenode.sink.timeline.collector.hosts={{ams_collector_hosts}}
-resourcemanager.sink.timeline.collector.hosts={{ams_collector_hosts}}
-nodemanager.sink.timeline.collector.hosts={{ams_collector_hosts}}
-jobhistoryserver.sink.timeline.collector.hosts={{ams_collector_hosts}}
-journalnode.sink.timeline.collector.hosts={{ams_collector_hosts}}
-maptask.sink.timeline.collector.hosts={{ams_collector_hosts}}
-reducetask.sink.timeline.collector.hosts={{ams_collector_hosts}}
-applicationhistoryserver.sink.timeline.collector.hosts={{ams_collector_hosts}}
-
-resourcemanager.sink.timeline.tagsForPrefix.yarn=Queue
-
-{% if is_nn_client_port_configured %}
-# Namenode rpc ports customization
-namenode.sink.timeline.metric.rpc.client.port={{nn_rpc_client_port}}
-{% endif %}
-{% if is_nn_dn_port_configured %}
-namenode.sink.timeline.metric.rpc.datanode.port={{nn_rpc_dn_port}}
-{% endif %}
-{% if is_nn_healthcheck_port_configured %}
-namenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}
-{% endif %}
-
-{% endif %}
-    </value>
-    <value-attributes>
-      <type>content</type>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/05ce603a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
index 30de1be..da7daad 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
@@ -365,7 +365,6 @@
         <config-type>ranger-hdfs-policymgr-ssl</config-type>
         <config-type>ranger-hdfs-security</config-type>
         <config-type>ams-ssl-client</config-type>
-        <config-type>hadoop-metrics2.properties</config-type>
       </configuration-dependencies>
       <restartRequiredAfterRackChange>true</restartRequiredAfterRackChange>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/05ce603a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index c77a906..29c88b4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -46,7 +46,9 @@ component_list = default("/localComponents", [])
 
 hdfs_tmp_dir = default("/configurations/hadoop-env/hdfs_tmp_dir", "/tmp")
 
-hadoop_metrics2_properties_content = config['configurations']['hadoop-metrics2.properties']['content']
+hadoop_metrics2_properties_content = None
+if 'hadoop-metrics2.properties' in config['configurations']:
+  hadoop_metrics2_properties_content = config['configurations']['hadoop-metrics2.properties']['content']
 
 # hadoop default params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"

http://git-wip-us.apache.org/repos/asf/ambari/blob/05ce603a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index cf958f0..9783aa4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -98,11 +98,18 @@ def setup_hadoop():
              owner=params.hdfs_user,
         )
 
-      File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
-           owner=params.hdfs_user,
-           group=params.user_group,
-           content=InlineTemplate(params.hadoop_metrics2_properties_content)
-      )
+      if params.hadoop_metrics2_properties_content:
+        File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
+             owner=params.hdfs_user,
+             group=params.user_group,
+             content=InlineTemplate(params.hadoop_metrics2_properties_content)
+             )
+      else:
+        File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
+             owner=params.hdfs_user,
+             group=params.user_group,
+             content=Template("hadoop-metrics2.properties.j2")
+             )
 
     if params.dfs_type == 'HCFS' and params.has_core_site and 'ECS_CLIENT' in params.component_list:
        create_dirs()

http://git-wip-us.apache.org/repos/asf/ambari/blob/05ce603a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-metrics2.properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-metrics2.properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-metrics2.properties.xml
new file mode 100644
index 0000000..4aadb83
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-metrics2.properties.xml
@@ -0,0 +1,125 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <!-- hadoop-metrics2.properties -->
+  <property>
+    <name>content</name>
+    <display-name>hadoop-metrics2.properties template</display-name>
+    <description>This is the jinja template for hadoop-metrics2.properties file</description>
+    <value>
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# syntax: [prefix].[source|sink|jmx].[instance].[options]
+# See package.html for org.apache.hadoop.metrics2 for details
+
+{% if has_ganglia_server %}
+*.period=60
+
+*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
+*.sink.ganglia.period=10
+
+# default for supportsparse is false
+*.sink.ganglia.supportsparse=true
+
+.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
+.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
+
+# Hook up to the server
+namenode.sink.ganglia.servers={{ganglia_server_host}}:8661
+datanode.sink.ganglia.servers={{ganglia_server_host}}:8659
+jobtracker.sink.ganglia.servers={{ganglia_server_host}}:8662
+tasktracker.sink.ganglia.servers={{ganglia_server_host}}:8658
+maptask.sink.ganglia.servers={{ganglia_server_host}}:8660
+reducetask.sink.ganglia.servers={{ganglia_server_host}}:8660
+resourcemanager.sink.ganglia.servers={{ganglia_server_host}}:8664
+nodemanager.sink.ganglia.servers={{ganglia_server_host}}:8657
+historyserver.sink.ganglia.servers={{ganglia_server_host}}:8666
+journalnode.sink.ganglia.servers={{ganglia_server_host}}:8654
+nimbus.sink.ganglia.servers={{ganglia_server_host}}:8649
+supervisor.sink.ganglia.servers={{ganglia_server_host}}:8650
+
+resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
+
+{% endif %}
+
+{% if has_metric_collector %}
+
+*.period={{metrics_collection_period}}
+*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+*.sink.timeline.period={{metrics_collection_period}}
+*.sink.timeline.sendInterval={{metrics_report_interval}}000
+*.sink.timeline.slave.host.name={{hostname}}
+*.sink.timeline.zookeeper.quorum={{zookeeper_quorum}}
+*.sink.timeline.protocol={{metric_collector_protocol}}
+*.sink.timeline.port={{metric_collector_port}}
+
+# HTTPS properties
+*.sink.timeline.truststore.path = {{metric_truststore_path}}
+*.sink.timeline.truststore.type = {{metric_truststore_type}}
+*.sink.timeline.truststore.password = {{metric_truststore_password}}
+
+datanode.sink.timeline.collector.hosts={{ams_collector_hosts}}
+namenode.sink.timeline.collector.hosts={{ams_collector_hosts}}
+resourcemanager.sink.timeline.collector.hosts={{ams_collector_hosts}}
+nodemanager.sink.timeline.collector.hosts={{ams_collector_hosts}}
+jobhistoryserver.sink.timeline.collector.hosts={{ams_collector_hosts}}
+journalnode.sink.timeline.collector.hosts={{ams_collector_hosts}}
+maptask.sink.timeline.collector.hosts={{ams_collector_hosts}}
+reducetask.sink.timeline.collector.hosts={{ams_collector_hosts}}
+applicationhistoryserver.sink.timeline.collector.hosts={{ams_collector_hosts}}
+
+resourcemanager.sink.timeline.tagsForPrefix.yarn=Queue
+
+{% if is_nn_client_port_configured %}
+# Namenode rpc ports customization
+namenode.sink.timeline.metric.rpc.client.port={{nn_rpc_client_port}}
+{% endif %}
+{% if is_nn_dn_port_configured %}
+namenode.sink.timeline.metric.rpc.datanode.port={{nn_rpc_dn_port}}
+{% endif %}
+{% if is_nn_healthcheck_port_configured %}
+namenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}
+{% endif %}
+
+{% endif %}
+    </value>
+    <value-attributes>
+      <type>content</type>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/05ce603a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/metainfo.xml
index 1fc7f51..c80b2b4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/metainfo.xml
@@ -21,6 +21,9 @@
     <service>
       <name>HDFS</name>
       <version>2.7.3.2.6</version>
+      <configuration-dependencies>
+        <config-type>hadoop-metrics2.properties</config-type>
+      </configuration-dependencies>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/05ce603a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/metainfo.xml
index 14f15fc..99cedd9 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/metainfo.xml
@@ -240,7 +240,6 @@
         <config-type>ranger-hdfs-policymgr-ssl</config-type>
         <config-type>ranger-hdfs-security</config-type>
         <config-type>ams-ssl-client</config-type>
-        <config-type>hadoop-metrics2.properties</config-type>
       </configuration-dependencies>
       <restartRequiredAfterRackChange>true</restartRequiredAfterRackChange>
 


[41/50] [abbrv] ambari git commit: AMBARI-19968. Control Log Level for all Hive components for log4j - UT fix (Madhuvanthi Radhakrishnan via smohanty)

Posted by nc...@apache.org.
AMBARI-19968. Control Log Level for all Hive components for log4j - UT fix (Madhuvanthi Radhakrishnan via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b695bf2b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b695bf2b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b695bf2b

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b695bf2bfcb96b48e7896a6242d25cf786cb46df
Parents: 3a95192
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Sun Feb 12 19:10:12 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Sun Feb 12 19:10:12 2017 -0800

----------------------------------------------------------------------
 .../org/apache/ambari/server/state/theme/TabLayout.java     | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b695bf2b/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java b/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java
index 4f6cf8f..98222de 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java
@@ -19,16 +19,15 @@
 package org.apache.ambari.server.state.theme;
 
 
-
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.annotate.JsonProperty;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
 
 @JsonSerialize(include= JsonSerialize.Inclusion.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)


[20/50] [abbrv] ambari git commit: AMBARI-19930. The service check status was set to TIMEOUT even if service check was failed. (mpapirkovskyy)

Posted by nc...@apache.org.
AMBARI-19930. The service check status was set to TIMEOUT even if service check was failed. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c9bea4ab
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c9bea4ab
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c9bea4ab

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: c9bea4ab8f8a042e60103e8bb8880fc718fa3cf3
Parents: fc9788a
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Thu Feb 9 20:11:26 2017 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Fri Feb 10 16:00:21 2017 +0200

----------------------------------------------------------------------
 .../server/actionmanager/ActionScheduler.java   | 38 +++++++++++---
 .../actionmanager/TestActionScheduler.java      | 54 ++++++++------------
 2 files changed, 53 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c9bea4ab/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
index dabcb98..fa2ad4f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
@@ -845,6 +845,28 @@ class ActionScheduler implements Runnable {
             commandsToSchedule.add(c);
             LOG.trace("===> commandsToSchedule(reschedule)=" + commandsToSchedule.size());
           }
+        } else if (isHostStateUnknown(s, hostObj, roleStr)) {
+          String message = "Action was aborted due agent is not heartbeating or was restarted.";
+          LOG.warn("Host: {}, role: {}, actionId: {} . {}", host, roleStr,
+            s.getActionId(), message);
+
+          db.abortHostRole(host, s.getRequestId(), s.getStageId(), c.getRole(), message);
+
+          if (null != cluster) {
+            if (!RoleCommand.CUSTOM_COMMAND.equals(c.getRoleCommand())
+              && !RoleCommand.SERVICE_CHECK.equals(c.getRoleCommand())
+              && !RoleCommand.ACTIONEXECUTE.equals(c.getRoleCommand())) {
+              //commands above don't affect host component state (e.g. no in_progress state in process), transition will fail
+              transitionToFailedState(cluster.getClusterName(), c.getServiceName(), roleStr, host, now, false);
+            }
+            if (c.getRoleCommand().equals(RoleCommand.ACTIONEXECUTE)) {
+              processActionDeath(cluster.getClusterName(), c.getHostname(), roleStr);
+            }
+          }
+
+          // Dequeue command
+          LOG.info("Removing command from queue, host={}, commandId={} ", host, c.getCommandId());
+          actionQueue.dequeue(host, c.getCommandId());
         } else if (status.equals(HostRoleStatus.PENDING)) {
           // in case of DEPENDENCY_ORDERED stage command can be scheduled only if all of it's dependencies are
           // already finished
@@ -1030,13 +1052,6 @@ class ActionScheduler implements Runnable {
       return false;
     }
 
-    // Fast fail task if host state is unknown
-    if (null != host &&
-      (host.getState().equals(HostState.HEARTBEAT_LOST) || wasAgentRestartedDuringOperation(host, stage, role))) {
-      LOG.debug("Timing out action since agent is not heartbeating or agent was restarted.");
-      return true;
-    }
-
     // tasks are held in a variety of in-memory maps that require a hostname key
     // host being null is ok - that means it's a server-side task
     String hostName = (null == host) ? null : host.getHostName();
@@ -1053,6 +1068,15 @@ class ActionScheduler implements Runnable {
     return false;
   }
 
+  private boolean isHostStateUnknown(Stage stage, Host host, String role) {
+    if (null != host &&
+      (host.getState().equals(HostState.HEARTBEAT_LOST) || wasAgentRestartedDuringOperation(host, stage, role))) {
+      LOG.debug("Abort action since agent is not heartbeating or agent was restarted.");
+      return true;
+    }
+    return false;
+  }
+
   private boolean hasCommandInProgress(Stage stage, String host) {
     List<ExecutionCommandWrapper> commandWrappers = stage.getExecutionCommands(host);
     for (ExecutionCommandWrapper wrapper : commandWrappers) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9bea4ab/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
index ade625a..653ad2c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
@@ -391,7 +391,7 @@ public class TestActionScheduler {
     when(host.getState()).thenReturn(HostState.HEARTBEAT_LOST);
     when(host.getHostName()).thenReturn(hostname);
 
-    List<Stage> stages = new ArrayList<Stage>();
+    final List<Stage> stages = new ArrayList<Stage>();
     final Stage s = StageUtils.getATestStage(1, 977, hostname, CLUSTER_HOST_INFO,
       "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
     stages.add(s);
@@ -404,16 +404,26 @@ public class TestActionScheduler {
 
     when(db.getCommandsInProgressCount()).thenReturn(stages.size());
     when(db.getStagesInProgress()).thenReturn(stages);
+
     doAnswer(new Answer<Void>() {
       @Override
       public Void answer(InvocationOnMock invocation) throws Throwable {
-        String host = (String) invocation.getArguments()[0];
-        String role = (String) invocation.getArguments()[3];
-        HostRoleCommand command = s.getHostRoleCommand(host, role);
-        command.setStatus(HostRoleStatus.TIMEDOUT);
+        Long requestId = (Long) invocation.getArguments()[1];
+        for (Stage stage : stages) {
+          if (requestId.equals(stage.getRequestId())) {
+            for (HostRoleCommand command : stage.getOrderedHostRoleCommands()) {
+              if (command.getStatus() == HostRoleStatus.QUEUED ||
+                command.getStatus() == HostRoleStatus.IN_PROGRESS ||
+                command.getStatus() == HostRoleStatus.PENDING) {
+                command.setStatus(HostRoleStatus.ABORTED);
+              }
+            }
+          }
+        }
+
         return null;
       }
-    }).when(db).timeoutHostRole(anyString(), anyLong(), anyLong(), anyString(), anyBoolean());
+    }).when(db).abortHostRole(anyString(), anyLong(), anyLong(), anyString(), anyString());
 
     //Small action timeout to test rescheduling
     AmbariEventPublisher aep = EasyMock.createNiceMock(AmbariEventPublisher.class);
@@ -423,18 +433,16 @@ public class TestActionScheduler {
             mock(HostRoleCommandDAO.class), mock(HostRoleCommandFactory.class)).
         addMockedMethod("cancelHostRoleCommands").
         createMock();
-    scheduler.cancelHostRoleCommands(EasyMock.<Collection<HostRoleCommand>>anyObject(),EasyMock.anyObject(String.class));
-    EasyMock.expectLastCall();
     EasyMock.replay(scheduler);
     scheduler.setTaskTimeoutAdjustment(false);
 
     int cycleCount=0;
     while (!stages.get(0).getHostRoleStatus(hostname, "NAMENODE")
-      .equals(HostRoleStatus.TIMEDOUT) && cycleCount++ <= MAX_CYCLE_ITERATIONS) {
+      .equals(HostRoleStatus.ABORTED) && cycleCount++ <= MAX_CYCLE_ITERATIONS) {
       scheduler.doWork();
     }
 
-    Assert.assertEquals(HostRoleStatus.TIMEDOUT,stages.get(0).getHostRoleStatus(hostname, "NAMENODE"));
+    Assert.assertEquals(HostRoleStatus.ABORTED,stages.get(0).getHostRoleStatus(hostname, "NAMENODE"));
 
     EasyMock.verify(scheduler, entityManagerProviderMock);
   }
@@ -503,23 +511,7 @@ public class TestActionScheduler {
     doAnswer(new Answer<Void>() {
       @Override
       public Void answer(InvocationOnMock invocation) throws Throwable {
-        String host = (String) invocation.getArguments()[0];
-        String role = (String) invocation.getArguments()[3];
-        //HostRoleCommand command = stages.get(0).getHostRoleCommand(host, role);
-        for (HostRoleCommand command : stages.get(0).getOrderedHostRoleCommands()) {
-          if (command.getHostName().equals(host) && command.getRole().name()
-              .equals(role)) {
-            command.setStatus(HostRoleStatus.TIMEDOUT);
-          }
-        }
-        return null;
-      }
-    }).when(db).timeoutHostRole(anyString(), anyLong(), anyLong(), anyString(), anyBoolean());
-
-    doAnswer(new Answer<Void>() {
-      @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
-        Long requestId = (Long) invocation.getArguments()[0];
+        Long requestId = (Long) invocation.getArguments()[1];
         for (Stage stage : stages) {
           if (requestId.equals(stage.getRequestId())) {
             for (HostRoleCommand command : stage.getOrderedHostRoleCommands()) {
@@ -534,7 +526,7 @@ public class TestActionScheduler {
 
         return null;
       }
-    }).when(db).abortOperation(anyLong());
+    }).when(db).abortHostRole(anyString(), anyLong(), anyLong(), anyString(), anyString());
 
     ArgumentCaptor<ServiceComponentHostEvent> eventsCapture1 =
       ArgumentCaptor.forClass(ServiceComponentHostEvent.class);
@@ -549,12 +541,12 @@ public class TestActionScheduler {
 
     int cycleCount=0;
     while (!(stages.get(0).getHostRoleStatus(hostname1, "DATANODE")
-      .equals(HostRoleStatus.TIMEDOUT) && stages.get(0).getHostRoleStatus
+      .equals(HostRoleStatus.ABORTED) && stages.get(0).getHostRoleStatus
       (hostname2, "NAMENODE").equals(HostRoleStatus.ABORTED)) && cycleCount++ <= MAX_CYCLE_ITERATIONS) {
       scheduler.doWork();
     }
 
-    Assert.assertEquals(HostRoleStatus.TIMEDOUT,
+    Assert.assertEquals(HostRoleStatus.ABORTED,
       stages.get(0).getHostRoleStatus(hostname1, "DATANODE"));
     Assert.assertEquals(HostRoleStatus.ABORTED,
       stages.get(0).getHostRoleStatus(hostname2, "NAMENODE"));
@@ -910,9 +902,7 @@ public class TestActionScheduler {
     EasyMock.expect(fsm.getCluster(EasyMock.anyString())).andReturn(cluster).anyTimes();
     EasyMock.expect(fsm.getHost(EasyMock.anyString())).andReturn(host);
     EasyMock.expect(cluster.getService(EasyMock.anyString())).andReturn(null);
-    EasyMock.expect(host.getLastRegistrationTime()).andReturn(HOST_REGISTRATION_TIME);
     EasyMock.expect(host.getHostName()).andReturn(Stage.INTERNAL_HOSTNAME).anyTimes();
-    EasyMock.expect(host.getState()).andReturn(HostState.HEALTHY);
 
     if (RoleCommand.ACTIONEXECUTE.equals(roleCommand)) {
       EasyMock.expect(cluster.getClusterName()).andReturn("clusterName").anyTimes();


[18/50] [abbrv] ambari git commit: AMBARI-19768. Broken kill_process_with_children shell single liner (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-19768. Broken kill_process_with_children shell single liner (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/41034aa1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/41034aa1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/41034aa1

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 41034aa157cc7b36f1635064055b83deb5f542bc
Parents: 59545f7
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Fri Feb 10 15:15:20 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Fri Feb 10 15:15:20 2017 +0200

----------------------------------------------------------------------
 .../python/ambari_agent/TestProcessUtils.py     | 224 +++++++++++++++++++
 .../src/test/python/ambari_agent/TestShell.py   |   5 +-
 .../main/python/ambari_commons/process_utils.py | 100 +++++++++
 .../src/main/python/ambari_commons/shell.py     |  54 ++---
 4 files changed, 346 insertions(+), 37 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/41034aa1/ambari-agent/src/test/python/ambari_agent/TestProcessUtils.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestProcessUtils.py b/ambari-agent/src/test/python/ambari_agent/TestProcessUtils.py
new file mode 100644
index 0000000..8331910
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestProcessUtils.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from ambari_agent import main
+
+main.MEMORY_LEAK_DEBUG_FILEPATH = "/tmp/memory_leak_debug.out"
+import unittest
+import signal
+import subprocess, time
+from mock.mock import patch, MagicMock, PropertyMock, call
+from ambari_commons import process_utils
+
+process_tree = {"111": "222\n 22",
+                "222": "333\n 33",
+                "22": "44\n 444",}
+
+
+class TestProcessUtils(unittest.TestCase):
+  @patch("subprocess.Popen")
+  def test_kill(self, popen_mock):
+    process_mock = MagicMock()
+    process_mock.communicate.return_value = (None, None)
+    returncode_mock = PropertyMock()
+    returncode_mock.return_value = 0
+    type(process_mock).returncode = returncode_mock
+    popen_mock.return_value = process_mock
+    process_utils.kill_pids(["12321113230", "2312415453"], signal.SIGTERM)
+    expected = [call(['kill', '-15', '12321113230', '2312415453'], stderr=-1, stdout=-1)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+
+  @patch("subprocess.Popen")
+  def test_get_children(self, popen_mock):
+
+    process_mock = MagicMock()
+    process_mock.communicate.return_value = ("123 \n \n 321\n", None)
+    popen_mock.return_value = process_mock
+    returncode_mock = PropertyMock()
+    returncode_mock.return_value = 0
+    type(process_mock).returncode = returncode_mock
+    result = process_utils.get_children("2312415453")
+
+    self.assertEquals(result, ["123", "321"])
+
+    expected = [
+      call(['ps', '-o', 'pid', '--no-headers', '--ppid', '2312415453'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+
+  @patch("subprocess.Popen")
+  def test_get_flat_process_tree(self, popen_mock):
+    def side_effect(*args, **kwargs):
+      process_mock = MagicMock()
+      returncode_mock = PropertyMock()
+      returncode_mock.return_value = 0
+      type(process_mock).returncode = returncode_mock
+      if args[0][5] in process_tree.keys():
+        process_mock.communicate.return_value = (process_tree[args[0][5]], None)
+      else:
+        process_mock.communicate.return_value = ("", None)
+      return process_mock
+
+    popen_mock.side_effect = side_effect
+    result = process_utils.get_flat_process_tree("111")
+    self.assertEquals(result, ['111', '222', '333', '33', '22', '44', '444'])
+
+    expected = [call(['ps', '-o', 'pid', '--no-headers', '--ppid', '111'], stderr=-1, stdout=-1),
+                call(['ps', '-o', 'pid', '--no-headers', '--ppid', '222'], stderr=-1, stdout=-1),
+                call(['ps', '-o', 'pid', '--no-headers', '--ppid', '333'], stderr=-1, stdout=-1),
+                call(['ps', '-o', 'pid', '--no-headers', '--ppid', '33'], stderr=-1, stdout=-1),
+                call(['ps', '-o', 'pid', '--no-headers', '--ppid', '22'], stderr=-1, stdout=-1),
+                call(['ps', '-o', 'pid', '--no-headers', '--ppid', '44'], stderr=-1, stdout=-1),
+                call(['ps', '-o', 'pid', '--no-headers', '--ppid', '444'], stderr=-1, stdout=-1)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+
+  @patch("subprocess.Popen")
+  def test_get_command_by_pid(self, popen_mock):
+
+    process_mock = MagicMock()
+    process_mock.communicate.return_value = ("yum something", None)
+    returncode_mock = PropertyMock()
+    returncode_mock.return_value = 0
+    type(process_mock).returncode = returncode_mock
+    popen_mock.return_value = process_mock
+
+    result = process_utils.get_command_by_pid("2312415453")
+
+    self.assertEquals(result, "yum something")
+
+    expected = [call(['ps', '-p', '2312415453', '-o', 'command', '--no-headers'], stderr=-1, stdout=-1)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+
+  @patch("subprocess.Popen")
+  def test_get_command_by_pid_not_exist(self, popen_mock):
+
+    process_mock = MagicMock()
+    process_mock.communicate.return_value = ("", None)
+    returncode_mock = PropertyMock()
+    returncode_mock.return_value = 1
+    type(process_mock).returncode = returncode_mock
+    popen_mock.return_value = process_mock
+
+    result = process_utils.get_command_by_pid("2312415453")
+
+    self.assertEquals(result, "NOT_FOUND[2312415453]")
+
+    expected = [call(['ps', '-p', '2312415453', '-o', 'command', '--no-headers'], stderr=-1, stdout=-1)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+
+  @patch("subprocess.Popen")
+  def test_is_process_running(self, popen_mock):
+    """ps echoes the pid back with rc 0, so the process counts as running."""
+
+    process_mock = MagicMock()
+    process_mock.communicate.return_value = ("2312415453", None)
+    returncode_mock = PropertyMock()
+    returncode_mock.return_value = 0
+    type(process_mock).returncode = returncode_mock
+    popen_mock.return_value = process_mock
+
+    result = process_utils.is_process_running("2312415453")
+
+    self.assertEquals(result, True)
+
+    expected = [call(['ps', '-p', '2312415453', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+
+  @patch("subprocess.Popen")
+  def test_is_process_not_running(self, popen_mock):
+    """rc != 0 from ps means the pid is gone; expect False."""
+
+    process_mock = MagicMock()
+    process_mock.communicate.return_value = ("", None)
+    returncode_mock = PropertyMock()
+    returncode_mock.return_value = 1
+    type(process_mock).returncode = returncode_mock
+    popen_mock.return_value = process_mock
+
+    result = process_utils.is_process_running("2312415453")
+
+    self.assertEquals(result, False)
+
+    expected = [call(['ps', '-p', '2312415453', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+
+  @patch("subprocess.Popen")
+  def test_get_processes_running(self, popen_mock):
+    """Only pids for which ps succeeds are reported back as running."""
+    def side_effect(*args, **kwargs):
+      # args[0][2] is the pid argument of ['ps', '-p', <pid>, ...]:
+      # simulate a live 4321 and a dead 1234.
+      process_mock = MagicMock()
+      returncode_mock = PropertyMock()
+      if args[0][2] == "4321":
+        returncode_mock.return_value = 0
+        process_mock.communicate.return_value = ("4321", None)
+      else:
+        returncode_mock.return_value = 1
+        process_mock.communicate.return_value = (None, None)
+      type(process_mock).returncode = returncode_mock
+      return process_mock
+
+    popen_mock.side_effect = side_effect
+
+    result = process_utils.get_processes_running(["1234", "4321"])
+
+    self.assertEquals(result, ["4321"])
+
+    # One ps probe per input pid, in input order.
+    expected = [call(['ps', '-p', '1234', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1),
+                call(['ps', '-p', '4321', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+
+  @patch("time.sleep")
+  @patch("subprocess.Popen")
+  def test_wait_for_process_death(self, popen_mock, sleep_mock):
+    """Polls ps until the pid disappears, sleeping between polls."""
+
+    # Three consecutive ps results: alive, alive, gone.
+    process_mock = MagicMock()
+    process_mock.communicate.side_effect = [("4321", None),("4321", None),(None, None)]
+    returncode_mock = PropertyMock()
+    returncode_mock.side_effect = [0, 0, 1]
+    type(process_mock).returncode = returncode_mock
+    popen_mock.return_value = process_mock
+
+    process_utils.wait_for_process_death("4321")
+
+    expected = [call(['ps', '-p', '4321', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1),
+                call(['ps', '-p', '4321', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1),
+                call(['ps', '-p', '4321', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+    # Two sleeps of check_time_delay: one after each "still alive" poll.
+    expected = [call(0.1), call(0.1)]
+    self.assertEquals(sleep_mock.call_args_list, expected)
+
+  @patch("time.sleep")
+  @patch("subprocess.Popen")
+  def test_wait_for_entire_process_tree_death(self, popen_mock, sleep_mock):
+    """Waits on each pid in order: 1234 dies after one poll, 4321 after two."""
+
+    # Interleaved ps results for pid 1234 (alive, gone) then 4321
+    # (alive, alive, gone).
+    process_mock = MagicMock()
+    process_mock.communicate.side_effect = [("1234", None), (None, None), ("4321", None), ("4321", None), (None, None)]
+    returncode_mock = PropertyMock()
+    returncode_mock.side_effect = [0, 1, 0, 0, 1]
+    type(process_mock).returncode = returncode_mock
+    popen_mock.return_value = process_mock
+
+    process_utils.wait_for_entire_process_tree_death(["1234", "4321"])
+
+    expected = [call(['ps', '-p', '1234', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1),
+                call(['ps', '-p', '1234', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1),
+                call(['ps', '-p', '4321', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1),
+                call(['ps', '-p', '4321', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1),
+                call(['ps', '-p', '4321', '-o', 'pid', '--no-headers'], stderr=-1, stdout=-1)]
+    self.assertEquals(popen_mock.call_args_list, expected)
+    # One sleep per "still alive" poll across both pids.
+    expected = [call(0.1), call(0.1), call(0.1)]
+    self.assertEquals(sleep_mock.call_args_list, expected)

http://git-wip-us.apache.org/repos/asf/ambari/blob/41034aa1/ambari-agent/src/test/python/ambari_agent/TestShell.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestShell.py b/ambari-agent/src/test/python/ambari_agent/TestShell.py
index 8d375e3..5dc1899 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestShell.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestShell.py
@@ -57,9 +57,7 @@ class TestShell(unittest.TestCase):
 
   def test_kill_process_with_children(self):
     if _platform == "linux" or _platform == "linux2": # Test is Linux-specific
-      gracefull_kill_delay_old = shell.gracefull_kill_delay
-      shell.gracefull_kill_delay = 0.1
-      sleep_cmd = "sleep 314159265"
+      sleep_cmd = "sleep 314"
       test_cmd = """ (({0}) & ({0} & {0})) """.format(sleep_cmd)
       # Starting process tree (multiple process groups)
       test_process = subprocess.Popen(test_cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)
@@ -76,7 +74,6 @@ class TestShell(unittest.TestCase):
       ps_process = subprocess.Popen(ps_cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)
       (out, err) = ps_process.communicate()
       self.assertFalse(sleep_cmd in out)
-      shell.gracefull_kill_delay = gracefull_kill_delay_old
     else:
       # Do not run under other systems
       pass

http://git-wip-us.apache.org/repos/asf/ambari/blob/41034aa1/ambari-common/src/main/python/ambari_commons/process_utils.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/process_utils.py b/ambari-common/src/main/python/ambari_commons/process_utils.py
new file mode 100644
index 0000000..50ffd02
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_commons/process_utils.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import subprocess
+import time
+
+check_time_delay = 0.1  # seconds between checks of process killed
+
+
def get_children(pid):
  """
  Lists the direct children of a process.

  :param pid: pid of the parent process (int or str)
  :return: list of child pid strings; empty when the process has no
           children or does not exist (ps exits non-zero)
  """
  # universal_newlines makes communicate() return text on both Python 2
  # and Python 3; without it Python 3 yields bytes, which would not mix
  # with the str pids built up by get_flat_process_tree().
  PSCMD = ["ps", "-o", "pid", "--no-headers", "--ppid", str(pid)]
  ps_process = subprocess.Popen(PSCMD, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                universal_newlines=True)
  stdout, stderr = ps_process.communicate()
  if ps_process.returncode != 0:
    # ps reports failure when no process matches the --ppid filter
    return []
  return stdout.split()
+
+
def get_flat_process_tree(pid):
  """
  Collects the pid of the given process and, recursively, of all of its
  descendants.

  :param pid: process id of the tree root
  :return: list of pid strings, root first (depth-first order)
  """
  tree = [str(pid)]
  for child_pid in get_children(pid):
    tree.extend(get_flat_process_tree(child_pid))
  return tree
+
+
def kill_pids(pids, signal):
  """
  Sends the given signal to every pid in the list via the external
  'kill' utility.

  :param pids: list of pid strings to signal
  :param signal: signal number to deliver
  :raises Fail: when the kill command exits with a non-zero code
  """
  # imported locally to avoid a hard dependency at module import time
  from resource_management.core.exceptions import Fail
  kill_command = ["kill", "-" + str(signal)] + list(pids)
  kill_process = subprocess.Popen(kill_command, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
  stdout, stderr = kill_process.communicate()
  if kill_process.returncode != 0:
    raise Fail("Unable to kill PIDs {0} : {1}".format(str(pids), stderr))
+
+
def get_command_by_pid(pid):
  """
  Resolves a pid to the full command line that started it.

  :param pid: pid to look up (int or str)
  :return: the command line as reported by ps (trailing newline included),
           or "NOT_FOUND[<pid>]" when ps cannot find the pid
  """
  CMD = ["ps", "-p", str(pid), "-o", "command", "--no-headers"]
  # text-mode output keeps the success branch returning str on Python 3
  # as well, consistent with the str returned by the NOT_FOUND branch
  process = subprocess.Popen(CMD, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True)
  stdout, stderr = process.communicate()
  if process.returncode != 0:
    return "NOT_FOUND[%s]" % pid
  return stdout
+
+
def wait_for_entire_process_tree_death(pids):
  """
  Blocks until every process in the given list has terminated (or its
  individual wait has timed out).

  :param pids: list of pids to wait on, processed in order
  """
  for pid in pids:
    wait_for_process_death(pid)
+
+
def wait_for_process_death(pid, timeout=5):
  """
  Polls until the process exits or the timeout elapses. Returns silently
  in either case; callers that must distinguish should re-check with
  is_process_running().

  :param pid: pid to watch
  :param timeout: maximum number of seconds to keep polling
  """
  deadline = time.time() + timeout
  while is_process_running(pid) and time.time() < deadline:
    time.sleep(check_time_delay)
+
+
def is_process_running(pid):
  """
  Checks whether a process with the given pid is currently alive.

  :param pid: pid to check (int or str)
  :return: True when ps finds the pid, False otherwise
  """
  CMD = ["ps", "-p", str(pid), "-o", "pid", "--no-headers"]
  # text-mode output so the membership test below compares str to str on
  # Python 3 as well
  process = subprocess.Popen(CMD, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True)
  stdout, stderr = process.communicate()
  if process.returncode != 0:
    return False
  # match whole pid tokens: a plain substring test would wrongly accept
  # pid "123" against an output containing "1234"
  return str(pid) in stdout.split()
+
+
+
def get_processes_running(process_pids):
  """
  Filters the given pid list down to the ones that are still alive.

  :param process_pids: list of process pids to check
  :return: list of pids (same objects, same order) that are running
  """
  return [candidate for candidate in process_pids if is_process_running(candidate)]

http://git-wip-us.apache.org/repos/asf/ambari/blob/41034aa1/ambari-common/src/main/python/ambari_commons/shell.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/shell.py b/ambari-common/src/main/python/ambari_commons/shell.py
index 8d26599..a4c91fb 100644
--- a/ambari-common/src/main/python/ambari_commons/shell.py
+++ b/ambari-common/src/main/python/ambari_commons/shell.py
@@ -19,25 +19,20 @@ limitations under the License.
 '''
 
 import logging
-import subprocess
 import os
-import tempfile
 import signal
-import sys
+import subprocess
 import threading
-import time
-import traceback
-import pprint
-import platform
 
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
+from ambari_commons.process_utils import get_flat_process_tree, kill_pids, wait_for_entire_process_tree_death, \
+  get_processes_running, get_command_by_pid
 
 logger = logging.getLogger()
 
 shellRunner = None
 threadLocal = threading.local()
-gracefull_kill_delay = 5  # seconds between SIGTERM and SIGKILL
 
 tempFiles = []
 
@@ -106,38 +101,31 @@ class shellRunnerWindows(shellRunner):
 #linux specific code
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def kill_process_with_children(parent_pid):
-  def kill_tree_function(pid, signal):
-    '''
-    Kills process tree starting from a given pid.
-    '''
-    # The command below starts 'ps' linux utility and then parses it's
-    # output using 'awk'. AWK recursively extracts PIDs of all children of
-    # a given PID and then passes list of "kill -<SIGNAL> PID" commands to 'sh'
-    # shell.
-    CMD = """ps xf | awk -v PID=""" + str(pid) + \
-          """ ' $1 == PID { P = $1; next } P && /_/ { P = P " " $1;""" + \
-          """K=P } P && !/_/ { P="" }  END { print "kill -""" \
-          + str(signal) + """ "K }' | sh """
-    process = subprocess.Popen(CMD, stdout=subprocess.PIPE,
-                               stderr=subprocess.PIPE, shell=True)
-    process.communicate()
-
-  _run_kill_function(kill_tree_function, parent_pid)
-
-
-def _run_kill_function(kill_function, pid):
+  """
+  Kills process tree starting from a given pid.
+  :param parent_pid: head of tree
+  :param graceful_kill_delays: map <command name, custom delay between SIGTERM and SIGKILL>
+  :return:
+  """
+
+  pids = get_flat_process_tree(parent_pid)
   try:
-    kill_function(pid, signal.SIGTERM)
+    kill_pids(pids, signal.SIGTERM)
   except Exception, e:
-    logger.warn("Failed to kill PID %d" % (pid))
+    logger.warn("Failed to kill PID %d" % parent_pid)
     logger.warn("Reported error: " + repr(e))
 
-  time.sleep(gracefull_kill_delay)
+  wait_for_entire_process_tree_death(pids)
 
   try:
-    kill_function(pid, signal.SIGKILL)
+    running_processes = get_processes_running(pids)
+    if running_processes:
+      process_names = map(lambda x: get_command_by_pid(x),  running_processes)
+      logger.warn("These PIDs %s did not die after SIGTERM, sending SIGKILL. Exact commands to be killed:\n %s" %
+                  (", ".join(running_processes), "\n".join(process_names)))
+      kill_pids(running_processes, signal.SIGKILL)
   except Exception, e:
-    logger.error("Failed to send SIGKILL to PID %d. Process exited?" % (pid))
+    logger.error("Failed to send SIGKILL to PID %d. Process exited?" % parent_pid)
     logger.error("Reported error: " + repr(e))
 
 


[15/50] [abbrv] ambari git commit: AMBARI-19954.Workflow designer is getting hung while importing the workflow.(Padma Priya N via gauravn7)

Posted by nc...@apache.org.
AMBARI-19954.Workflow designer is getting hung while importing the workflow.(Padma Priya N via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1339d52b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1339d52b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1339d52b

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 1339d52bb1e685bb363157ac949e52da970113b8
Parents: d7e11e9
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Fri Feb 10 16:48:56 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Fri Feb 10 16:49:45 2017 +0530

----------------------------------------------------------------------
 .../ui/app/components/decision-add-branch.js    |  3 +-
 .../ui/app/components/flow-designer.js          | 25 ++++++-------
 .../ui/app/components/transition-config.js      |  2 +-
 .../ui/app/domain/cytoscape-flow-renderer.js    | 38 ++++++++++++++++++--
 .../app/templates/components/flow-designer.hbs  |  2 +-
 5 files changed, 49 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1339d52b/contrib/views/wfmanager/src/main/resources/ui/app/components/decision-add-branch.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/decision-add-branch.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/decision-add-branch.js
index e4b2224..65d0974 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/decision-add-branch.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/decision-add-branch.js
@@ -54,8 +54,9 @@ export default Ember.Component.extend(Validations, FindNodeMixin,{
         }
         self.set("isInsertAction",false);
         this.set("newNodeType",null);
+        this.get('flowRenderer').populateOkToandErrorTONodes(node);
         var commonTarget=this.findCommonTargetNode(this.workflow.startNode,this.get('node'));
-        var descendantNodes=this.getDesendantNodes(this.get('node'));
+        var descendantNodes= this.get('node.validOkToNodes');
         if (commonTarget){
           descendantNodes.removeObject(commonTarget);
           descendantNodes.unshiftObject(commonTarget);

http://git-wip-us.apache.org/repos/asf/ambari/blob/1339d52b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index fd7a258..83a1b27 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -60,7 +60,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   previewXml:"",
   supportedActionTypes:["java", "hive", "pig", "sqoop", "shell", "spark", "map-reduce", "hive2", "sub-workflow", "distcp", "ssh", "FS"],
   workflow:null,
-  hoveredWidget:null,/**/
+  flowRenderer:null,
+  hoveredWidget:null,
   showingConfirmationNewWorkflow:false,
   showingWorkflowConfigProps:false,
   workflowSubmitConfigs:{},
@@ -107,16 +108,12 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     var id = 'cy-' + Math.ceil(Math.random() * 1000);
     this.set('cyId', id);
     this.sendAction('register', this.get('tabInfo'), this);
+    this.set('flowRenderer',CytoscapeRenderer.create());
+    this.set('workflow',Workflow.create({}));
     CommonUtils.setTestContext(this);
   }.on('init'),
   elementsInserted :function(){
-    if (this.useCytoscape){
-      this.flowRenderer=CytoscapeRenderer.create({id : this.get('cyId')});
-    }else{
-      this.flowRenderer=JSPlumbRenderer.create({});
-    }
     this.setConentWidth();
-    this.set('workflow',Workflow.create({}));
     if(this.get("xmlAppPath")){
       this.showExistingWorkflow();
       return;
@@ -241,14 +238,13 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   },
   nodeRendered: function(){
     this.doValidation();
-    if(this.get('renderNodeTransitions')){
-      this.flowRenderer.onDidUpdate(this,this.get("workflow").startNode,this.get("workflow"));
-      this.layout();
-      this.set('renderNodeTransitions',false);
-    }
     this.resize();
     this.persistWorkInProgress();
   }.on('didUpdate'),
+  renderTransitions : function(){
+    this.flowRenderer.onDidUpdate(this,this.get("workflow").startNode,this.get("workflow"));
+    this.layout();
+  },
   resize(){
     this.flowRenderer.resize();
   },
@@ -261,12 +257,13 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     var canvasHeight=Ember.$(window).height()-panelOffset.top-25;
     this.flowRenderer.initRenderer(function(){
       this.renderWorkflow();
-    }.bind(this),{context:this,flattenedNodes:this.get("flattenedNodes"),dataNodes:this.get("dataNodes"), cyOverflow:this.get("cyOverflow"),canvasHeight:canvasHeight});
+    }.bind(this),{context:this,id : this.get('cyId'),flattenedNodes:this.get("flattenedNodes"),dataNodes:this.get("dataNodes"), cyOverflow:this.get("cyOverflow"),canvasHeight:canvasHeight});
   },
   renderWorkflow(){
     this.set('renderNodeTransitions', true);
     this.flowRenderer.renderWorkflow(this.get("workflow"));
     this.doValidation();
+    this.renderTransitions();
   },
   rerender(){
     this.flowRenderer.cleanup();
@@ -656,7 +653,6 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   },
   openWorkflowEditor(node){
     this.createSnapshot();
-    var validOkToNodes = WorkflowPathUtil.findValidTransitionsTo(this.get('workflow'), node);
     this.set('showActionEditor', true);
     this.set('currentAction', node.actionType);
     var domain = node.getNodeDetail();
@@ -664,7 +660,6 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     this.set('clonedErrorNode', node.errorNode);
     this.set('clonedKillMessage',node.get('killMessage'));
     node.set("domain", domain);
-    node.set("validOkToNodes", validOkToNodes);
     this.set('currentNode', node);
   },
   openDecisionEditor(node) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/1339d52b/contrib/views/wfmanager/src/main/resources/ui/app/components/transition-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/transition-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/transition-config.js
index ca45b1f..ce04863 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/transition-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/transition-config.js
@@ -28,7 +28,7 @@ const Validations = buildValidations({
 export default Ember.Component.extend(FindNodeMixin, Validations, {
   selectedKillNode : '',
   initialize : function(){
-    this.set('descendantNodes',this.getDesendantNodes(this.get('currentNode')));
+    this.set('descendantNodes', this.get('currentNode.validErrorToNodes'));
     if(!this.get('transition.okToNode')){
       var defaultOkToNode = this.getOKToNode(this.get('currentNode'));
       this.set('transition.okToNode', defaultOkToNode);

http://git-wip-us.apache.org/repos/asf/ambari/blob/1339d52b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
index 17fb4a0..8202c6d 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
@@ -30,7 +30,8 @@ var CytoscapeRenderer= Ember.Object.extend({
       container: this.get("context").$('#'+this.id),
       elements: [],
       style: CytoscapeStyles.style,
-      layout: this.get("layoutConfigs")
+      layout: this.get("layoutConfigs"),
+      pixelRatio : 1
     });
 
     // the default values of each option are outlined below:
@@ -98,6 +99,7 @@ var CytoscapeRenderer= Ember.Object.extend({
         data: {
           id: node.id, name: node.name, type: node.type,
           shape: self._getShape(node.type),
+          type : node.type,
           node: node
         },
         dataNodeName: Ember.computed.alias('data.node.name')
@@ -298,15 +300,44 @@ var CytoscapeRenderer= Ember.Object.extend({
 
     this.get("context").$('.overlay-settings-icon i').off('click');
     this.get("context").$('.overlay-settings-icon i').on('click',function(){
-      this.get("context").openWorkflowEditor(this.get("context").$(".overlay-settings-icon").data("node"));
+      let node = this.get("context").$(".overlay-settings-icon").data("node");
+      this.populateOkToandErrorTONodes(node);
+      this.get("context").openWorkflowEditor(node);
       this.get("context").$('.overlay-node-actions').hide();
     }.bind(this));
   },
-
+  populateOkToandErrorTONodes(node){
+    let alternatePathNodes = this.cy.$('#'+node.id).predecessors("node[name][type='decision']").union(this.cy.$('#'+node.id).predecessors("node[name][type='decision']"));
+    let descendantNodes = [];
+    if(alternatePathNodes.length > 0){
+      alternatePathNodes.forEach(childNode =>{
+        let childNodeData = childNode.data();
+        if(childNodeData.type === 'placeholder'){
+          return;
+        }
+        let successors = this.cy.$(`#${childNodeData.id}`).successors("node[name]").difference(this.cy.$('#'+node.id).incomers("node[name]"));
+        descendantNodes.pushObjects(successors.jsons().mapBy('data.node'));
+      });
+    }else{
+      descendantNodes.pushObjects(this.cy.$(`#${node.id}`).successors("node[name]").jsons().mapBy('data.node'));
+    }
+    let okToNodes = [];
+    let errorToNodes = [];
+    okToNodes = descendantNodes.reject((descendantNode)=>{
+      return descendantNode.get('type') === 'placeholder' || descendantNode.get('type') === 'kill' || descendantNode.id === node.id;
+    }, this);
+    errorToNodes = descendantNodes.reject((descendantNode)=>{
+      return descendantNode.get('type') === 'placeholder' || descendantNode.id === node.id;
+    }, this);
+    node.set('validOkToNodes', okToNodes);
+    node.set('validErrorToNodes', errorToNodes);
+  },
   renderWorkflow(workflow){
     this._getCyDataNodes(workflow);
+    this.cy.startBatch();
     this.cy.$('node').remove();
     this.cy.add(this.get('dataNodes'));
+    this.cy.endBatch();
     this.cy.layout(this.get("layoutConfigs"));
     this._setCyOverflow();
   },
@@ -315,6 +346,7 @@ var CytoscapeRenderer= Ember.Object.extend({
     this.context=settings.context;
     this.dataNodes=settings.dataNodes;
     this.cyOverflow=settings.cyOverflow;
+    this.id=settings.id;
     this._initCY(settings);
     callback();
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/1339d52b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index b9ecb11..00e8b76 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -260,7 +260,7 @@
               <i class="fa fa-cloud-upload"></i>
             </span>
           </div>
-          {{decision-add-branch node=node registerAddBranchAction="registerAddBranchAction" addDecisionBranch="addDecisionBranch" workflow=workflow}}
+          {{decision-add-branch node=node registerAddBranchAction="registerAddBranchAction" addDecisionBranch="addDecisionBranch" workflow=workflow flowRenderer=flowRenderer}}
         </div>
           {{#if cyOverflow.overflown}}
             <div class="cyScrollMsg"><i class="fa fa-ellipsis-h cyScrollMsgContent" title="Use the pan tool or drag on canvas to see more" aria-hidden="true"></i></div>


[43/50] [abbrv] ambari git commit: AMBARI-19944 Increase SNMP Unit Test Coverage (dsen)

Posted by nc...@apache.org.
AMBARI-19944 Increase SNMP Unit Test Coverage (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b8176ff6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b8176ff6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b8176ff6

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b8176ff63c00115156062a2af9d2e2cae164e514
Parents: a906061
Author: Dmytro Sen <ds...@apache.org>
Authored: Mon Feb 13 12:42:06 2017 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Mon Feb 13 12:42:06 2017 +0200

----------------------------------------------------------------------
 .../AlertNoticeDispatchServiceTest.java         | 159 ++++++++++++++++++-
 1 file changed, 152 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b8176ff6/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java
index 07094a8..8423eaf 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/services/AlertNoticeDispatchServiceTest.java
@@ -24,13 +24,16 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
+import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.EnumSet;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.TimeZone;
 import java.util.UUID;
+import java.util.Vector;
 import java.util.concurrent.Executor;
 
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
@@ -38,6 +41,7 @@ import org.apache.ambari.server.notifications.DispatchFactory;
 import org.apache.ambari.server.notifications.Notification;
 import org.apache.ambari.server.notifications.NotificationDispatcher;
 import org.apache.ambari.server.notifications.TargetConfigurationResult;
+import org.apache.ambari.server.notifications.dispatchers.AmbariSNMPDispatcher;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.AlertDispatchDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
@@ -48,10 +52,24 @@ import org.apache.ambari.server.state.AlertState;
 import org.apache.ambari.server.state.NotificationState;
 import org.apache.ambari.server.state.alert.Scope;
 import org.apache.ambari.server.state.alert.SourceType;
+import org.apache.ambari.server.state.alert.TargetType;
 import org.easymock.EasyMock;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.snmp4j.CommandResponder;
+import org.snmp4j.CommandResponderEvent;
+import org.snmp4j.PDU;
+import org.snmp4j.Snmp;
+import org.snmp4j.TransportMapping;
+import org.snmp4j.mp.SnmpConstants;
+import org.snmp4j.smi.Address;
+import org.snmp4j.smi.GenericAddress;
+import org.snmp4j.smi.Integer32;
+import org.snmp4j.smi.OID;
+import org.snmp4j.smi.OctetString;
+import org.snmp4j.smi.VariableBinding;
+import org.snmp4j.transport.DefaultUdpTransportMapping;
 
 import com.google.inject.Binder;
 import com.google.inject.Guice;
@@ -240,10 +258,10 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
    * @throws Exception
    */
   @Test
-  public void testSingleDispatch() throws Exception {
+  public void testSingleSnmpDispatch() throws Exception {
     MockSnmpDispatcher dispatcher = new MockSnmpDispatcher();
 
-    List<AlertNoticeEntity> notices = getSnmpMockNotices();
+    List<AlertNoticeEntity> notices = getSnmpMockNotices("SNMP");
     AlertNoticeEntity notice1 = notices.get(0);
     AlertNoticeEntity notice2 = notices.get(1);
 
@@ -269,6 +287,105 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
   }
 
   /**
+   * Tests a digest dispatch for Ambari SNMP.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testAmbariSnmpSingleDispatch() throws Exception {
+    MockAmbariSnmpDispatcher dispatcher = new MockAmbariSnmpDispatcher();
+
+    List<AlertNoticeEntity> notices = getSnmpMockNotices("AMBARI_SNMP");
+    AlertNoticeEntity notice1 = notices.get(0);
+    AlertNoticeEntity notice2 = notices.get(1);
+
+    EasyMock.expect(m_dao.findPendingNotices()).andReturn(notices).once();
+    EasyMock.expect(m_dao.merge(notice1)).andReturn(notice1).once();
+    EasyMock.expect(m_dao.merge(notice2)).andReturn(notice2).once();
+    EasyMock.expect(m_dispatchFactory.getDispatcher("AMBARI_SNMP")).andReturn(dispatcher).atLeastOnce();
+
+    EasyMock.replay(m_dao, m_dispatchFactory);
+
+    // "startup" the service so that its initialization is done
+    AlertNoticeDispatchService service = m_injector.getInstance(AlertNoticeDispatchService.class);
+    service.startUp();
+
+    // service trigger with mock executor that blocks
+    service.setExecutor(new MockExecutor());
+    service.runOneIteration();
+
+    EasyMock.verify(m_dao, m_dispatchFactory);
+
+    List<Notification> notifications = dispatcher.getNotifications();
+    assertEquals(2, notifications.size());
+  }
+
+  /**
+   * Tests a real dispatch for Ambari SNMP.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testAmbariSnmpRealDispatch() throws Exception {
+    AmbariSNMPDispatcher dispatcher = new AmbariSNMPDispatcher(8081);
+
+    List<AlertNoticeEntity> notices = getSnmpMockNotices("AMBARI_SNMP");
+    AlertNoticeEntity notice1 = notices.get(0);
+    AlertNoticeEntity notice2 = notices.get(1);
+
+    EasyMock.expect(m_dao.findPendingNotices()).andReturn(notices).once();
+    EasyMock.expect(m_dao.merge(notice1)).andReturn(notice1).once();
+    EasyMock.expect(m_dao.merge(notice2)).andReturn(notice2).once();
+    EasyMock.expect(m_dispatchFactory.getDispatcher("AMBARI_SNMP")).andReturn(dispatcher).once();
+    EasyMock.expect(m_dao.findNoticeByUuid(ALERT_NOTICE_UUID_1)).andReturn(notice1).once();
+    EasyMock.expect(m_dao.merge(notice1)).andReturn(notice1).once();
+    EasyMock.expect(m_dao.findNoticeByUuid(ALERT_NOTICE_UUID_2)).andReturn(notice2).once();
+    EasyMock.expect(m_dao.merge(notice2)).andReturn(notice2).once();
+    EasyMock.replay(m_dao, m_dispatchFactory);
+
+    // "startup" the service so that its initialization is done
+    AlertNoticeDispatchService service = m_injector.getInstance(AlertNoticeDispatchService.class);
+    service.startUp();
+
+    // service trigger with mock executor that blocks
+    service.setExecutor(new MockExecutor());
+    SnmpReceiver snmpReceiver = new SnmpReceiver();
+
+    service.runOneIteration();
+    Thread.sleep(1000);
+
+    EasyMock.verify(m_dao, m_dispatchFactory);
+
+    List<Vector> expectedTrapVectors = new LinkedList<>();
+    Vector firstVector = new Vector();
+    firstVector.add(new VariableBinding(SnmpConstants.snmpTrapOID, new OID(AmbariSNMPDispatcher.AMBARI_ALERT_TRAP_OID)));
+    firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_ID_OID), new Integer32(new BigDecimal(1L).intValueExact())));
+    firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_NAME_OID), new OctetString("alert-definition-1")));
+    firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_DEFINITION_HASH_OID), new OctetString("1")));
+    firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_NAME_OID), new OctetString("Alert Definition 1")));
+
+    Vector secondVector = new Vector(firstVector);
+
+    firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_TEXT_OID), new OctetString(ALERT_UNIQUE_TEXT)));
+    firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_STATE_OID), new Integer32(0)));
+    firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_HOST_NAME_OID), new OctetString("null")));
+    firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_SERVICE_NAME_OID), new OctetString("HDFS")));
+    firstVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_COMPONENT_NAME_OID), new OctetString("null")));
+
+    secondVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_TEXT_OID), new OctetString(ALERT_UNIQUE_TEXT + " CRITICAL")));
+    secondVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_STATE_OID), new Integer32(3)));
+    secondVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_HOST_NAME_OID), new OctetString("null")));
+    secondVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_SERVICE_NAME_OID), new OctetString("HDFS")));
+    secondVector.add(new VariableBinding(new OID(AmbariSNMPDispatcher.AMBARI_ALERT_COMPONENT_NAME_OID), new OctetString("null")));
+
+    expectedTrapVectors.add(firstVector);
+    expectedTrapVectors.add(secondVector);
+    assertNotNull(snmpReceiver.receivedTrapsVectors);
+    assertTrue(snmpReceiver.receivedTrapsVectors.size() == 2);
+    assertEquals(expectedTrapVectors, snmpReceiver.receivedTrapsVectors);
+  }
+
+  /**
    * Tests that a failed dispatch invokes the callback to mark the UUIDs of the
    * notices as FAILED.
    *
@@ -384,11 +501,11 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
   }
 
   /**
-   * Gets 2 PENDING notices for SNMP.
+   * Gets 2 PENDING notices for SNMP or AMBARI_SNMP notificationType.
    *
    * @return
    */
-  private List<AlertNoticeEntity> getSnmpMockNotices() {
+  private List<AlertNoticeEntity> getSnmpMockNotices(String notificationType) {
     AlertDefinitionEntity definition = new AlertDefinitionEntity();
     definition.setDefinitionId(1L);
     definition.setDefinitionName("alert-definition-1");
@@ -417,9 +534,10 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
     target.setAlertStates(EnumSet.allOf(AlertState.class));
     target.setTargetName("Alert Target");
     target.setDescription("Mock Target");
-    target.setNotificationType("SNMP");
+    target.setNotificationType(notificationType);
 
-    String properties = "{ \"foo\" : \"bar\" }";
+    String properties = "{ \"ambari.dispatch.snmp.version\": \"SNMPv1\", \"ambari.dispatch.snmp.port\": \"8000\"," +
+                         " \"ambari.dispatch.recipients\": [\"127.0.0.1\"],\"ambari.dispatch.snmp.community\":\"\" }";
     target.setProperties(properties);
 
     AlertNoticeEntity notice1 = new AlertNoticeEntity();
@@ -493,7 +611,7 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
   /**
    * A mock dispatcher that captures the {@link Notification}.
    */
-  private static final class MockSnmpDispatcher implements
+  private static class MockSnmpDispatcher implements
       NotificationDispatcher {
 
     private List<Notification> m_notifications = new ArrayList<Notification>();
@@ -541,6 +659,11 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
     }
   }
 
+  private static final class MockAmbariSnmpDispatcher extends MockSnmpDispatcher {
+    @Override
+    public String getType() { return TargetType.AMBARI_SNMP.name();}
+  }
+
   /**
    * A mock dispatcher that captures the {@link Notification}.
    */
@@ -624,4 +747,26 @@ public class AlertNoticeDispatchServiceTest extends AlertNoticeDispatchService {
       EasyMock.replay(m_metaInfo);
     }
   }
+
+  private class SnmpReceiver {
+    private Snmp snmp = null;
+    private Address targetAddress = GenericAddress.parse("udp:127.0.0.1/8000");
+    private TransportMapping transport = null;
+    public List<Vector> receivedTrapsVectors = null;
+    public SnmpReceiver() throws Exception{
+      transport = new DefaultUdpTransportMapping();
+      snmp = new Snmp(transport);
+      receivedTrapsVectors = new LinkedList<>();
+
+      CommandResponder trapPrinter = new CommandResponder() {
+        public synchronized void processPdu(CommandResponderEvent e){
+          PDU command = e.getPDU();
+          if (command != null) {
+            receivedTrapsVectors.add(command.getVariableBindings());
+          }
+        }
+      };
+      snmp.addNotificationListener(targetAddress, trapPrinter);
+    }
+  }
 }


[49/50] [abbrv] ambari git commit: AMBARI-19970 : AMS graphs are not present on cluster with SSL (Commit 2). (avijayan)

Posted by nc...@apache.org.
AMBARI-19970 : AMS graphs are not present on cluster with SSL (Commit 2). (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7cb9a6ad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7cb9a6ad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7cb9a6ad

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 7cb9a6ad53a926f055c5bf78ac2a9a8bd1668d78
Parents: d252665
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Mon Feb 13 10:56:46 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Mon Feb 13 10:56:46 2017 -0800

----------------------------------------------------------------------
 .../0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2  | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7cb9a6ad/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
index 8c20f2b..8c6f86f 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
@@ -42,16 +42,19 @@ hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.period=30
 hbase.collector.hosts={{ams_collector_hosts}}
 hbase.port={{metric_collector_port}}
+hbase.protocol={{metric_collector_protocol}}
 
 jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 jvm.period=30
 jvm.collector.hosts={{ams_collector_hosts}}
 jvm.port={{metric_collector_port}}
+jvm.protocol={{metric_collector_protocol}}
 
 rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 rpc.period=30
 rpc.collector.hosts={{ams_collector_hosts}}
 rpc.port={{metric_collector_port}}
+rpc.protocol={{metric_collector_protocol}}
 
 *.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
 *.sink.timeline.slave.host.name={{hostname}}
@@ -60,6 +63,7 @@ hbase.sink.timeline.period={{metrics_collection_period}}
 hbase.sink.timeline.sendInterval={{metrics_report_interval}}000
 hbase.sink.timeline.collector.hosts={{ams_collector_hosts}}
 hbase.sink.timeline.port={{metric_collector_port}}
+hbase.sink.timeline.protocol={{metric_collector_protocol}}
 hbase.sink.timeline.serviceName-prefix=ams
 
 # HTTPS properties


[27/50] [abbrv] ambari git commit: AMBARI-19357. Flume metrics can't be shown if the hostname of the flume agent is not lowercase. (Yao Lei via swagle)

Posted by nc...@apache.org.
AMBARI-19357. Flume metrics can't be shown if the hostname of the flume agent is not lowercase. (Yao Lei via swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/38a17a7f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/38a17a7f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/38a17a7f

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 38a17a7fb4a27c7ab2bd3d4e87d8ffd0f4150079
Parents: 4381561
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Fri Feb 10 13:31:39 2017 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Fri Feb 10 13:31:39 2017 -0800

----------------------------------------------------------------------
 .../hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java       | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/38a17a7f/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
index 86e092a..3fdf3f4 100644
--- a/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
@@ -88,6 +88,8 @@ public class FlumeTimelineMetricsSink extends AbstractTimelineMetricsSink implem
       if ((hostname == null) || (!hostname.contains("."))) {
         hostname = InetAddress.getLocalHost().getCanonicalHostName();
       }
+      hostname = hostname.toLowerCase();
+
     } catch (UnknownHostException e) {
       LOG.error("Could not identify hostname.");
       throw new FlumeException("Could not identify hostname.", e);


[48/50] [abbrv] ambari git commit: AMBARI-19987 Oozie start failed after enabling credential store (dsen)

Posted by nc...@apache.org.
AMBARI-19987 Oozie start failed after enabling credential store (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d252665c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d252665c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d252665c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d252665ce94f532b08efb9bc5f31f7b8f6cde97a
Parents: fa32fec
Author: Dmytro Sen <ds...@apache.org>
Authored: Mon Feb 13 19:42:59 2017 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Mon Feb 13 19:42:59 2017 +0200

----------------------------------------------------------------------
 .../ambari_agent/CustomServiceOrchestrator.py   | 12 +++++-
 .../ambari_commons/credential_store_helper.py   | 45 ++++++++++++++++++++
 .../query/render/ClusterBlueprintRenderer.java  |  8 ++--
 .../internal/ServiceResourceProvider.java       |  1 -
 .../orm/entities/ServiceDesiredStateEntity.java | 21 ---------
 .../org/apache/ambari/server/state/Service.java |  8 ----
 .../apache/ambari/server/state/ServiceImpl.java | 40 +++--------------
 .../server/upgrade/UpgradeCatalog250.java       |  5 ---
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |  1 -
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |  1 -
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |  1 -
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |  1 -
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |  1 -
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |  1 -
 .../0.12.0.2.0/package/scripts/params_linux.py  | 35 +++------------
 .../4.0.0.2.0/package/scripts/params_linux.py   | 20 ++++++++-
 .../server/upgrade/UpgradeCatalog250Test.java   | 12 +-----
 17 files changed, 90 insertions(+), 123 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index bacda46..9f2852b 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -238,6 +238,7 @@ class CustomServiceOrchestrator():
                 value_names.append(value_name) # Gather the value_name for deletion
           if len(credentials) > 0:
             configtype_credentials[config_type] = credentials
+            logger.info("Identifying config {0} for CS: ".format(config_type))
           for value_name in value_names:
             # Remove the clear text password
             config.pop(value_name, None)
@@ -255,8 +256,11 @@ class CustomServiceOrchestrator():
     roleCommand = None
     if 'roleCommand' in commandJson:
       roleCommand = commandJson['roleCommand']
+    task_id = None
+    if 'taskId' in commandJson:
+      task_id = commandJson['taskId']
 
-    logger.info('generateJceks: roleCommand={0}'.format(roleCommand))
+    logger.info('Generating the JCEKS file: roleCommand={0} and taskId = {1}'.format(roleCommand, task_id))
 
     # Set up the variables for the external command to generate a JCEKS file
     java_home = commandJson['hostLevelParams']['java_home']
@@ -267,6 +271,12 @@ class CustomServiceOrchestrator():
 
     # Gather the password values and remove them from the configuration
     configtype_credentials = self.getConfigTypeCredentials(commandJson)
+
+    # CS is enabled but no config property is available for this command
+    if len(configtype_credentials) == 0:
+      logger.info("Credential store is enabled but no property are found that can be encrypted.")
+      commandJson['credentialStoreEnabled'] = "false"
+
     for config_type, credentials in configtype_credentials.items():
       config = commandJson['configurations'][config_type]
       file_path = os.path.join(self.getProviderDirectory(serviceName), "{0}.jceks".format(config_type))

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-common/src/main/python/ambari_commons/credential_store_helper.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/credential_store_helper.py b/ambari-common/src/main/python/ambari_commons/credential_store_helper.py
new file mode 100644
index 0000000..914c1c7
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_commons/credential_store_helper.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+
+from resource_management.core.resources.system import File
+from resource_management.core.shell import checked_call
+from resource_management.core.source import DownloadSource
+
+credential_util_cmd = 'org.apache.ambari.server.credentialapi.CredentialUtil'
+credential_util_jar = 'CredentialUtil.jar'
+
+def get_password_from_credential_store(alias, provider_path, cs_lib_path, java_home, jdk_location):
+    # Try to download CredentialUtil.jar from ambari-server resources
+    credential_util_dir = cs_lib_path.split('*')[0] # Remove the trailing '*'
+    credential_util_path = os.path.join(credential_util_dir, credential_util_jar)
+    credential_util_url =  jdk_location + credential_util_jar
+    File(credential_util_path,
+         content = DownloadSource(credential_util_url),
+         mode = 0755,
+         )
+
+    # Execute a get command on the CredentialUtil CLI to get the password for the specified alias
+    java_bin = '{java_home}/bin/java'.format(java_home=java_home)
+    cmd = (java_bin, '-cp', cs_lib_path, credential_util_cmd, 'get', alias, '-provider', provider_path)
+    cmd_result, std_out_msg  = checked_call(cmd)
+    std_out_lines = std_out_msg.split('\n')
+    return std_out_lines[-1] # Get the last line of the output, to skip warnings if any.
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
index 4091ee8..5e19a6c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
@@ -286,11 +286,9 @@ public class ClusterBlueprintRenderer extends BaseRenderer implements Renderer {
 
       //service_settings population
       property = new HashMap<>();
-      if (ServiceInfoMap.get("credential_store_supported").equals("true")) {
-        if (ServiceInfoMap.get("credential_store_enabled").equals("true")) {
-          property.put("name", ServiceInfoMap.get("service_name").toString());
-          property.put("credential_store_enabled", "true");
-        }
+      if (ServiceInfoMap.get("credential_store_enabled").equals("true")) {
+        property.put("name", ServiceInfoMap.get("service_name").toString());
+        property.put("credential_store_enabled", "true");
       }
 
       //Fetch the service Components to obtain ServiceComponentInfo

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
index 0d5c174..99a81c1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
@@ -368,7 +368,6 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
       AmbariMetaInfo ambariMetaInfo = getManagementController().getAmbariMetaInfo();
       ServiceInfo serviceInfo = ambariMetaInfo.getService(stackId.getStackName(),
           stackId.getStackVersion(), request.getServiceName());
-      s.setCredentialStoreSupported(serviceInfo.isCredentialStoreSupported());
       LOG.info("Service: {}, credential_store_supported from stack definition:{}", request.getServiceName(),
           serviceInfo.isCredentialStoreSupported());
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java
index e4401a1..885f995 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceDesiredStateEntity.java
@@ -67,9 +67,6 @@ public class ServiceDesiredStateEntity {
   @Enumerated(value = EnumType.STRING)
   private SecurityState securityState = SecurityState.UNSECURED;
 
-  @Column(name = "credential_store_supported", nullable = false, insertable = true, updatable = true)
-  private short credentialStoreSupported = 0;
-
   @Column(name = "credential_store_enabled", nullable = false, insertable = true, updatable = true)
   private short credentialStoreEnabled = 0;
 
@@ -138,24 +135,6 @@ public class ServiceDesiredStateEntity {
   }
 
   /**
-   * Gets a value indicating if credential store is supported or not.
-   *
-   * @return true or false
-   */
-  public boolean isCredentialStoreSupported() {
-    return credentialStoreSupported != 0;
-  }
-
-  /**
-   * Sets a value indicating if credential store is supported or not.
-   *
-   * @param credentialStoreSupported
-   */
-  public void setCredentialStoreSupported(boolean credentialStoreSupported) {
-    this.credentialStoreSupported = (short)((credentialStoreSupported == false) ? 0 : 1);
-  }
-
-  /**
    * Gets a value indicating if credential store use is enabled or not.
    *
    * @return true or false

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
index cf36a8b..0f425a4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
@@ -110,14 +110,6 @@ public interface Service {
   boolean isCredentialStoreSupported();
 
   /**
-   * Set a true or false value specifying if this
-   * service supports credential store.
-   *
-   * @param credentialStoreSupported - true or false
-   */
-  void setCredentialStoreSupported(boolean credentialStoreSupported);
-
-  /**
    * Get a true or false value specifying whether
    * credential store use is enabled for this service.
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
index e223eed..713c189 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
@@ -70,6 +70,7 @@ public class ServiceImpl implements Service {
   private final Cluster cluster;
   private final ConcurrentMap<String, ServiceComponent> components = new ConcurrentHashMap<>();
   private final boolean isClientOnlyService;
+  private final boolean isCredentialStoreSupported;
 
   @Inject
   private ServiceConfigDAO serviceConfigDAO;
@@ -130,6 +131,8 @@ public class ServiceImpl implements Service {
 
     isClientOnlyService = sInfo.isClientOnlyService();
 
+    isCredentialStoreSupported = sInfo.isCredentialStoreSupported();
+
     persist(serviceEntity);
   }
 
@@ -174,6 +177,7 @@ public class ServiceImpl implements Service {
     ServiceInfo sInfo = ambariMetaInfo.getService(stackId.getStackName(),
         stackId.getStackVersion(), getName());
     isClientOnlyService = sInfo.isClientOnlyService();
+    isCredentialStoreSupported = sInfo.isCredentialStoreSupported();
   }
 
   @Override
@@ -327,45 +331,11 @@ public class ServiceImpl implements Service {
    */
   @Override
   public boolean isCredentialStoreSupported() {
-    ServiceDesiredStateEntity desiredStateEntity = getServiceDesiredStateEntity();
-
-    if (desiredStateEntity != null) {
-      return desiredStateEntity.isCredentialStoreSupported();
-    } else {
-      LOG.warn("Trying to fetch a member from an entity object that may " +
-              "have been previously deleted, serviceName = " + getName());
-    }
-    return false;
+    return isCredentialStoreSupported;
   }
 
 
-  /**
-   * Set a true or false value specifying whether this
-   * service supports credential store.
-   *
-   * @param credentialStoreSupported - true or false
-   */
-  @Override
-  public void setCredentialStoreSupported(boolean credentialStoreSupported) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Setting CredentialStoreEnabled of Service" + ", clusterName="
-              + cluster.getClusterName() + ", clusterId="
-              + cluster.getClusterId() + ", serviceName=" + getName()
-              + ", oldCredentialStoreSupported=" + isCredentialStoreSupported()
-              + ", newCredentialStoreSupported=" + credentialStoreSupported);
-    }
-
-    ServiceDesiredStateEntity desiredStateEntity = getServiceDesiredStateEntity();
-
-    if (desiredStateEntity != null) {
-      desiredStateEntity.setCredentialStoreSupported(credentialStoreSupported);
-      desiredStateEntity = serviceDesiredStateDAO.merge(desiredStateEntity);
 
-    } else {
-      LOG.warn("Setting a member on an entity object that may have been "
-              + "previously deleted, serviceName = " + getName());
-    }
-  }
 
   /**
    * Get a true or false value specifying whether

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index 2082048..1f93f1f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -83,7 +83,6 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
   public static final String COMPONENT_VERSION_FK_REPO_VERSION = "FK_scv_repo_version_id";
 
   protected static final String SERVICE_DESIRED_STATE_TABLE = "servicedesiredstate";
-  protected static final String CREDENTIAL_STORE_SUPPORTED_COL = "credential_store_supported";
   protected static final String CREDENTIAL_STORE_ENABLED_COL = "credential_store_enabled";
 
   protected static final String HOST_COMPONENT_DESIREDSTATE_TABLE = "hostcomponentdesiredstate";
@@ -547,12 +546,8 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
    */
   private void updateServiceDesiredStateTable() throws SQLException {
     // ALTER TABLE servicedesiredstate ADD COLUMN
-    // credential_store_supported SMALLINT DEFAULT 0 NOT NULL
     // credential_store_enabled SMALLINT DEFAULT 0 NOT NULL
     dbAccessor.addColumn(SERVICE_DESIRED_STATE_TABLE,
-      new DBColumnInfo(CREDENTIAL_STORE_SUPPORTED_COL, Short.class, null, 0, false));
-
-    dbAccessor.addColumn(SERVICE_DESIRED_STATE_TABLE,
       new DBColumnInfo(CREDENTIAL_STORE_ENABLED_COL, Short.class, null, 0, false));
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index b79c945..c7d7990 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -262,7 +262,6 @@ CREATE TABLE servicedesiredstate (
   service_name VARCHAR(255) NOT NULL,
   maintenance_state VARCHAR(32) NOT NULL,
   security_state VARCHAR(32) NOT NULL DEFAULT 'UNSECURED',
-  credential_store_supported SMALLINT NOT NULL DEFAULT 0,
   credential_store_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT PK_servicedesiredstate PRIMARY KEY (cluster_id, service_name),
   CONSTRAINT FK_sds_desired_stack_id FOREIGN KEY (desired_stack_id) REFERENCES stack(stack_id),

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 1c502bc..de79328 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -272,7 +272,6 @@ CREATE TABLE servicedesiredstate (
   service_name VARCHAR(255) NOT NULL,
   maintenance_state VARCHAR(32) NOT NULL DEFAULT 'ACTIVE',
   security_state VARCHAR(32) NOT NULL DEFAULT 'UNSECURED',
-  credential_store_supported SMALLINT NOT NULL DEFAULT 0,
   credential_store_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT PK_servicedesiredstate PRIMARY KEY (cluster_id, service_name),
   CONSTRAINT FK_sds_desired_stack_id FOREIGN KEY (desired_stack_id) REFERENCES stack(stack_id),

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index c6d4ad0..16c5864 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -252,7 +252,6 @@ CREATE TABLE servicedesiredstate (
   service_name VARCHAR2(255) NOT NULL,
   maintenance_state VARCHAR2(32) NOT NULL,
   security_state VARCHAR2(32) DEFAULT 'UNSECURED' NOT NULL,
-  credential_store_supported SMALLINT DEFAULT 0 NOT NULL,
   credential_store_enabled SMALLINT DEFAULT 0 NOT NULL,
   CONSTRAINT PK_servicedesiredstate PRIMARY KEY (cluster_id, service_name),
   CONSTRAINT FK_sds_desired_stack_id FOREIGN KEY (desired_stack_id) REFERENCES stack(stack_id),

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 1be87bb..91610bb 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -261,7 +261,6 @@ CREATE TABLE servicedesiredstate (
   service_name VARCHAR(255) NOT NULL,
   maintenance_state VARCHAR(32) NOT NULL,
   security_state VARCHAR(32) NOT NULL DEFAULT 'UNSECURED',
-  credential_store_supported SMALLINT NOT NULL DEFAULT 0,
   credential_store_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT PK_servicedesiredstate PRIMARY KEY (cluster_id, service_name),
   CONSTRAINT FK_sds_desired_stack_id FOREIGN KEY (desired_stack_id) REFERENCES stack(stack_id),

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index abe48e8..aebbcb0 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -250,7 +250,6 @@ CREATE TABLE servicedesiredstate (
   service_name VARCHAR(255) NOT NULL,
   maintenance_state VARCHAR(32) NOT NULL DEFAULT 'ACTIVE',
   security_state VARCHAR(32) NOT NULL DEFAULT 'UNSECURED',
-  credential_store_supported SMALLINT NOT NULL DEFAULT 0,
   credential_store_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT PK_servicedesiredstate PRIMARY KEY (cluster_id, service_name),
   CONSTRAINT FK_sds_desired_stack_id FOREIGN KEY (desired_stack_id) REFERENCES stack(stack_id),

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 169a464..d3eaa6c 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -265,7 +265,6 @@ CREATE TABLE servicedesiredstate (
   service_name VARCHAR(255) NOT NULL,
   maintenance_state VARCHAR(32) NOT NULL,
   security_state VARCHAR(32) NOT NULL DEFAULT 'UNSECURED',
-  credential_store_supported SMALLINT NOT NULL DEFAULT 0,
   credential_store_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT PK_servicedesiredstate PRIMARY KEY CLUSTERED (cluster_id,service_name),
   CONSTRAINT FK_sds_desired_stack_id FOREIGN KEY (desired_stack_id) REFERENCES stack(stack_id),

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index a32fbfb..ba610a0 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -48,9 +48,7 @@ from resource_management.libraries.functions.get_architecture import get_archite
 
 from resource_management.core.utils import PasswordString
 from resource_management.core.shell import checked_call
-from resource_management.core.logger import Logger
-from resource_management.core.resources.system import File
-from resource_management.core.source import DownloadSource
+from ambari_commons.credential_store_helper import get_password_from_credential_store
 
 # Default log4j version; put config files under /etc/hive/conf
 log4j_version = '1'
@@ -230,36 +228,15 @@ hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.opti
 
 jdk_location = config['hostLevelParams']['jdk_location']
 
-credential_util_cmd = 'org.apache.ambari.server.credentialapi.CredentialUtil'
-credential_util_jar = 'CredentialUtil.jar'
-
-# Gets the hive metastore password from its JCEKS provider, if available.
-def getHiveMetastorePassword():
-  passwd = ''
+if credential_store_enabled:
   if 'hadoop.security.credential.provider.path' in config['configurations']['hive-site']:
-    # Try to download CredentialUtil.jar from ambari-server resources
     cs_lib_path = config['configurations']['hive-site']['credentialStoreClassPath']
-    credential_util_dir = cs_lib_path.split('*')[0] # Remove the trailing '*'
-    credential_util_path = os.path.join(credential_util_dir, credential_util_jar)
-    credential_util_url =  jdk_location + credential_util_jar
-    File(credential_util_path,
-         content = DownloadSource(credential_util_url),
-         mode = 0644,
-    )
-
-    # Execute a get command on the CredentialUtil CLI to get the password for the specified alias
     java_home = config['hostLevelParams']['java_home']
-    java_bin = '{java_home}/bin/java'.format(java_home=java_home)
     alias = 'javax.jdo.option.ConnectionPassword'
     provider_path = config['configurations']['hive-site']['hadoop.security.credential.provider.path']
-    cmd = (java_bin, '-cp', cs_lib_path, credential_util_cmd, 'get', alias, '-provider', provider_path)
-    cmd_result, std_out_msg  = checked_call(cmd)
-    std_out_lines = std_out_msg.split('\n')
-    passwd = std_out_lines[-1] # Get the last line of the output, to skip warnings if any.
-  return passwd
-
-if credential_store_enabled:
-  hive_metastore_user_passwd = PasswordString(getHiveMetastorePassword())
+    hive_metastore_user_passwd = PasswordString(get_password_from_credential_store(alias, provider_path, cs_lib_path, java_home, jdk_location))
+  else:
+    raise Exception("hadoop.security.credential.provider.path property should be set")
 else:
   hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
 hive_metastore_user_passwd = unicode(hive_metastore_user_passwd) if not is_empty(hive_metastore_user_passwd) else hive_metastore_user_passwd
@@ -843,4 +820,4 @@ if enable_ranger_hive:
   if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor.lower() == 'sqla':
     xa_audit_db_is_enabled = False
 
-# ranger hive plugin section end
\ No newline at end of file
+# ranger hive plugin section end

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index eb438e7..d30a465 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -34,6 +34,8 @@ from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions.get_architecture import get_architecture
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 
+from resource_management.core.utils import PasswordString
+from ambari_commons.credential_store_helper import get_password_from_credential_store
 from urlparse import urlparse
 
 import status_params
@@ -166,6 +168,10 @@ zk_connection_string = default('/configurations/oozie-site/oozie.zookeeper.conne
 jaas_file = os.path.join(conf_dir, 'zkmigrator_jaas.conf')
 stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
 
+credential_store_enabled = False
+if 'credentialStoreEnabled' in config:
+  credential_store_enabled = config['credentialStoreEnabled']
+
 if security_enabled:
   oozie_site = dict(config['configurations']['oozie-site'])
   oozie_principal_with_host = oozie_principal.replace('_HOST', hostname)
@@ -195,7 +201,19 @@ oozie_env_sh_template = config['configurations']['oozie-env']['content']
 oracle_driver_jar_name = "ojdbc6.jar"
 
 oozie_metastore_user_name = config['configurations']['oozie-site']['oozie.service.JPAService.jdbc.username']
-oozie_metastore_user_passwd = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.password","")
+
+if credential_store_enabled:
+  if 'hadoop.security.credential.provider.path' in config['configurations']['oozie-site']:
+    cs_lib_path = config['configurations']['oozie-site']['credentialStoreClassPath']
+    java_home = config['hostLevelParams']['java_home']
+    alias = 'oozie.service.JPAService.jdbc.password'
+    provider_path = config['configurations']['oozie-site']['hadoop.security.credential.provider.path']
+    oozie_metastore_user_passwd = PasswordString(get_password_from_credential_store(alias, provider_path, cs_lib_path, java_home, jdk_location))
+  else:
+    raise Exception("hadoop.security.credential.provider.path property should be set")
+else:
+  oozie_metastore_user_passwd = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.password","")
+
 oozie_jdbc_connection_url = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.url", "")
 oozie_log_dir = config['configurations']['oozie-env']['oozie_log_dir']
 oozie_data_dir = config['configurations']['oozie-env']['oozie_data_dir']

http://git-wip-us.apache.org/repos/asf/ambari/blob/d252665c/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index 1c742ef..cee490b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@ -189,9 +189,7 @@ public class UpgradeCatalog250Test {
       eq("repo_version"), eq("repo_version_id"), eq(false));
 
     // servicedesiredstate table
-    Capture<DBAccessor.DBColumnInfo> capturedCredentialStoreSupportedCol = newCapture();
     Capture<DBAccessor.DBColumnInfo> capturedCredentialStoreEnabledCol = newCapture();
-    dbAccessor.addColumn(eq(UpgradeCatalog250.SERVICE_DESIRED_STATE_TABLE), capture(capturedCredentialStoreSupportedCol));
     dbAccessor.addColumn(eq(UpgradeCatalog250.SERVICE_DESIRED_STATE_TABLE), capture(capturedCredentialStoreEnabledCol));
 
     expect(dbAccessor.getConnection()).andReturn(connection).anyTimes();
@@ -253,16 +251,8 @@ public class UpgradeCatalog250Test {
     // did we get them all?
     Assert.assertEquals(0, expected.size());
 
-    // Verify if credential_store_supported & credential_store_enabled columns
+    // Verify if credential_store_enabled columns
     // were added to servicedesiredstate table
-    DBAccessor.DBColumnInfo capturedCredentialStoreSupportedColValues = capturedCredentialStoreSupportedCol.getValue();
-    Assert.assertNotNull(capturedCredentialStoreSupportedColValues);
-
-    Assert.assertEquals(UpgradeCatalog250.CREDENTIAL_STORE_SUPPORTED_COL, capturedCredentialStoreSupportedColValues.getName());
-    Assert.assertEquals(null, capturedCredentialStoreSupportedColValues.getLength());
-    Assert.assertEquals(Short.class, capturedCredentialStoreSupportedColValues.getType());
-    Assert.assertEquals(0, capturedCredentialStoreSupportedColValues.getDefaultValue());
-    Assert.assertEquals(false, capturedCredentialStoreSupportedColValues.isNullable());
 
     DBAccessor.DBColumnInfo capturedCredentialStoreEnabledColValues = capturedCredentialStoreEnabledCol.getValue();
     Assert.assertNotNull(capturedCredentialStoreEnabledColValues);


[19/50] [abbrv] ambari git commit: AMBARI-19951. Set "yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled" property value during RU/EU to HDP2.6 (dgrinenko via dlysnichenko)

Posted by nc...@apache.org.
AMBARI-19951. Set "yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled" property value during RU/EU to HDP2.6 (dgrinenko via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fc9788af
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fc9788af
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fc9788af

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: fc9788af1d5c9533de5961e5bc97cf46b2a98b44
Parents: 41034aa
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Fri Feb 10 15:16:53 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Fri Feb 10 15:16:53 2017 +0200

----------------------------------------------------------------------
 .../main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml | 7 +++++++
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml        | 6 ++++++
 .../main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml    | 1 +
 .../main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml | 7 +++++++
 .../stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml        | 7 +++++++
 .../main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml    | 1 +
 .../main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml | 7 +++++++
 .../stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml        | 7 +++++++
 .../main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml    | 1 +
 9 files changed, 44 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fc9788af/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index eac318e..478f9b4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -511,6 +511,13 @@
             <type>yarn-env</type>
             <insert key="content" value="{% if rm_security_opts is defined %} YARN_OPTS=&quot;{{rm_security_opts}} $YARN_OPTS&quot; {% endif %}" insert-type="append" newline-before="true" newline-after="true" />
           </definition>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption">
+            <type>yarn-site</type>
+            <transfer operation="copy"
+                      from-key="yarn.resourcemanager.scheduler.monitor.enable"
+                      to-key="yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled"
+                      default-value="false"/>
+          </definition>
         </changes>
       </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc9788af/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index 8da11ff..0d4e3b8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -319,6 +319,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Resource Manager">
+        <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption">
+          <summary>Updating underutilized_preemption setting</summary>
+        </task>
+      </execute-stage>
+
       <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Apply config changes for Mapreduce2 client">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">
           <summary>Verifying LZO codec path for mapreduce</summary>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc9788af/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index 01fc102..58db4a9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -762,6 +762,7 @@
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_yarn_audit_db" />
           <task xsi:type="configure" id="yarn_log4j_parameterize" />
           <task xsi:type="configure" id="yarn_env_security_opts" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption" />
         </pre-upgrade>
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc9788af/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index cc50ac5..18f5fa1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -297,6 +297,13 @@
             <type>yarn-env</type>
             <insert key="content" value="{% if rm_security_opts is defined %} YARN_OPTS=&quot;{{rm_security_opts}} $YARN_OPTS&quot; {% endif %}" insert-type="append" newline-before="true" newline-after="true" />
           </definition>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption">
+            <type>yarn-site</type>
+            <transfer operation="copy"
+                      from-key="yarn.resourcemanager.scheduler.monitor.enable"
+                      to-key="yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled"
+                      default-value="false"/>
+          </definition>
         </changes>
       </component>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc9788af/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index 046904b..eedf98c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -293,6 +293,13 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_add_spark2_yarn_shuffle"/>
       </execute-stage>
 
+      <!--Yarn-->
+      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Resource Manager">
+        <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption">
+          <summary>Updating underutilized_preemption setting</summary>
+        </task>
+      </execute-stage>
+
       <!--TEZ-->
       <execute-stage service="TEZ" component="TEZ_CLIENT" title="Verify LZO codec path for Tez">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc9788af/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index 70bb2ca..392e0fa 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -767,6 +767,7 @@
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_yarn_audit_db" />
           <task xsi:type="configure" id="yarn_log4j_parameterize" />
           <task xsi:type="configure" id="yarn_env_security_opts" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption" />
         </pre-upgrade>
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc9788af/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index a5bfcf6..100df8f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -139,6 +139,13 @@
           <type>yarn-env</type>
           <insert key="content" value="{% if rm_security_opts is defined %} YARN_OPTS=&quot;{{rm_security_opts}} $YARN_OPTS&quot; {% endif %}" insert-type="append" newline-before="true" newline-after="true" />
         </definition>
+        <definition xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption">
+          <type>yarn-site</type>
+          <transfer operation="copy"
+                    from-key="yarn.resourcemanager.scheduler.monitor.enable"
+                    to-key="yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled"
+                    default-value="false"/>
+        </definition>
       </changes>
     </component>
   </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc9788af/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index 5b8351b..6e92141 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -298,6 +298,13 @@
         </task>
       </execute-stage>
 
+      <!--Yarn-->
+      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Resource Manager">
+        <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption">
+          <summary>Updating underutilized_preemption setting</summary>
+        </task>
+      </execute-stage>
+
       <!--TEZ-->
       <execute-stage service="TEZ" component="TEZ_CLIENT" title="Verify LZO codec path for Tez">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc9788af/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index 2f07c97..bc68754 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -669,6 +669,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="yarn_log4j_parameterize" />
           <task xsi:type="configure" id="yarn_env_security_opts" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption" />
         </pre-upgrade>
         <pre-downgrade />
         <upgrade>


[30/50] [abbrv] ambari git commit: AMBARI-19577. Add Livy session recovery configurations in Ambari - fix merge issue (Saisai Shao via smohanty)

Posted by nc...@apache.org.
AMBARI-19577. Add Livy session recovery configurations in Ambari - fix merge issue (Saisai Shao via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b00cf74f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b00cf74f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b00cf74f

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b00cf74f6cecb984381eeed9e464ad1db6461a18
Parents: e7d0e78
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Fri Feb 10 16:53:45 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Fri Feb 10 16:53:45 2017 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b00cf74f/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
index c8a65bd..c21a5e6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/livy2-conf.xml
@@ -85,8 +85,8 @@
             Whether to enable HiveContext in livy interpreter
         </description>
         <on-ambari-upgrade add="false"/>
-    <property>
     </property>
+    <property>
         <name>livy.server.recovery.mode</name>
         <value>recovery</value>
         <description>


[31/50] [abbrv] ambari git commit: AMBARI-19667. Hive View 2.0: Editor should be stretchable by dragging (pallavkul)

Posted by nc...@apache.org.
AMBARI-19667. Hive View 2.0: Editor should be stretchable by dragging (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d96d209c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d96d209c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d96d209c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d96d209c1ad8aeefd9ab1c02fbe621c7dfd02ffa
Parents: b00cf74
Author: pallavkul <pa...@gmail.com>
Authored: Sat Feb 11 08:46:11 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Sat Feb 11 08:46:11 2017 +0530

----------------------------------------------------------------------
 .../src/main/resources/ui/app/components/query-editor.js    | 8 ++++++++
 .../views/hive20/src/main/resources/ui/app/styles/app.scss  | 9 +++++++++
 contrib/views/hive20/src/main/resources/ui/bower.json       | 1 +
 .../views/hive20/src/main/resources/ui/ember-cli-build.js   | 2 ++
 4 files changed, 20 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d96d209c/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js b/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
index 27d43d5..7bfe223 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
@@ -74,6 +74,14 @@ export default Ember.Component.extend({
       });
     });
 
+    this.$('.CodeMirror').resizable({
+      handles: 's',
+
+      resize: function () {
+        Ember.run.debounce(this, updateSize, 150);
+      }
+    }).find('.ui-resizable-s').addClass('grip fa fa-reorder');
+
 
   }.on('didInsertElement'),
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d96d209c/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index 0dae396..1dc86d7 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -223,6 +223,15 @@ pre {
   overflow-y: scroll;
 }
 
+.grip {
+  height: 20px;
+  border: 0 1px 1px solid #ddd;
+  background-color: #f5f5f5;
+  color: #bbb;
+  text-align: center;
+  font-size: inherit;
+}
+
 .hv-dropdown {
   position: absolute;
   .dropdown-menu {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d96d209c/contrib/views/hive20/src/main/resources/ui/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/bower.json b/contrib/views/hive20/src/main/resources/ui/bower.json
index f4d9aa0..9fa7076 100644
--- a/contrib/views/hive20/src/main/resources/ui/bower.json
+++ b/contrib/views/hive20/src/main/resources/ui/bower.json
@@ -6,6 +6,7 @@
     "ember-cli-shims": "~0.1.1",
     "ember-qunit-notifications": "0.1.0",
     "font-awesome": "~4.5.0",
+    "jquery-ui": "~1.12.1",
     "codemirror": "~5.15.0",
     "bootstrap-treeview": "~1.2.0",
     "blob": "*"

http://git-wip-us.apache.org/repos/asf/ambari/blob/d96d209c/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js b/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
index 10e0402..d53cdac 100644
--- a/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
+++ b/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
@@ -55,6 +55,8 @@ module.exports = function(defaults) {
    app.import('bower_components/codemirror/addon/hint/show-hint.js');
    app.import('bower_components/d3/d3.js');
    app.import('bower_components/codemirror/lib/codemirror.css');
+   app.import('bower_components/jquery-ui/jquery-ui.js');
+   app.import('bower_components/jquery-ui/themes/base/jquery-ui.css');
    app.import('bower_components/codemirror/addon/hint/show-hint.css');
 
   /*


[24/50] [abbrv] ambari git commit: AMBARI-19963 Deviation alerts fail with "No JSON object could be decoded" (dsen)

Posted by nc...@apache.org.
AMBARI-19963 Deviation alerts fail with "No JSON object could be decoded" (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fe1704e1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fe1704e1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fe1704e1

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: fe1704e12287685f8ca0fed179eb471556eee37b
Parents: 33caec2
Author: Dmytro Sen <ds...@apache.org>
Authored: Fri Feb 10 19:52:40 2017 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Fri Feb 10 19:52:40 2017 +0200

----------------------------------------------------------------------
 .../src/main/python/ambari_commons/network.py   | 39 ++++++++++++++++++++
 .../package/scripts/metrics_grafana_util.py     |  2 +-
 .../0.1.0/package/scripts/network.py            | 39 --------------------
 .../0.1.0/package/scripts/service_check.py      |  2 +-
 .../package/alerts/alert_metrics_deviation.py   | 14 +++++--
 .../package/alerts/alert_metrics_deviation.py   | 14 +++++--
 .../2.0.6/HDFS/test_alert_metrics_deviation.py  |  2 +
 7 files changed, 65 insertions(+), 47 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fe1704e1/ambari-common/src/main/python/ambari_commons/network.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/network.py b/ambari-common/src/main/python/ambari_commons/network.py
new file mode 100644
index 0000000..b5b1cd6
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_commons/network.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import httplib
+import ssl
+
+from resource_management.core.exceptions import Fail
+
+def get_http_connection(host, port, https_enabled=False, ca_certs=None):
+  if https_enabled:
+    if ca_certs:
+      check_ssl_certificate(host, port, ca_certs)
+    return httplib.HTTPSConnection(host, port)
+  else:
+    return httplib.HTTPConnection(host, port)
+
+def check_ssl_certificate(host, port, ca_certs):
+  try:
+    ssl.get_server_certificate((host, port), ssl_version=ssl.PROTOCOL_SSLv23, ca_certs=ca_certs)
+  except (ssl.SSLError) as ssl_error:
+    raise Fail("Failed to verify the SSL certificate for https://{0}:{1} with CA certificate in {2}"
+               .format(host, port, ca_certs))

http://git-wip-us.apache.org/repos/asf/ambari/blob/fe1704e1/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
index 84dcd99..a751330 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
@@ -31,7 +31,7 @@ import random
 import time
 import socket
 import ambari_simplejson as json
-import network
+import ambari_commons.network as network
 import os
 
 GRAFANA_CONNECT_TRIES = 15

http://git-wip-us.apache.org/repos/asf/ambari/blob/fe1704e1/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/network.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/network.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/network.py
deleted file mode 100644
index 672ee53..0000000
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/network.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import httplib
-import ssl
-
-from resource_management.core.exceptions import Fail
-
-def get_http_connection(host, port, https_enabled=False, ca_certs=None):
-  if https_enabled:
-    if ca_certs:
-      check_ssl_certificate(host, port, ca_certs)
-    return httplib.HTTPSConnection(host, port)
-  else:
-    return httplib.HTTPConnection(host, port)
-
-def check_ssl_certificate(host, port, ca_certs):
-  try:
-    ssl.get_server_certificate((host, port), ssl_version=ssl.PROTOCOL_SSLv23, ca_certs=ca_certs)
-  except (ssl.SSLError) as ssl_error:
-    raise Fail("Failed to verify the SSL certificate for AMS Collector https://{0}:{1} with CA certificate in {2}"
-               .format(host, port, ca_certs))

http://git-wip-us.apache.org/repos/asf/ambari/blob/fe1704e1/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
index 2bc4363..e753958 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
@@ -28,7 +28,7 @@ from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons.parallel_processing import PrallelProcessResult, execute_in_parallel, SUCCESS
 
 import httplib
-import network
+import ambari_commons.network as network
 import urllib
 import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
 import os

http://git-wip-us.apache.org/repos/asf/ambari/blob/fe1704e1/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py
index 8a06f56..bc2102a 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py
@@ -24,6 +24,8 @@ import logging
 import urllib
 import time
 import urllib2
+import os
+import ambari_commons.network as network
 
 from resource_management import Environment
 from ambari_commons.aggregate_functions import sample_standard_deviation, mean
@@ -55,6 +57,7 @@ SECURITY_ENABLED_KEY = '{{cluster-env/security_enabled}}'
 SMOKEUSER_KEY = '{{cluster-env/smokeuser}}'
 EXECUTABLE_SEARCH_PATHS = '{{kerberos-env/executable_search_paths}}'
 
+AMS_HTTP_POLICY = '{{ams-site/timeline.metrics.service.http.policy}}'
 METRICS_COLLECTOR_WEBAPP_ADDRESS_KEY = '{{ams-site/timeline.metrics.service.webapp.address}}'
 METRICS_COLLECTOR_VIP_HOST_KEY = '{{cluster-env/metrics_collector_vip_host}}'
 METRICS_COLLECTOR_VIP_PORT_KEY = '{{cluster-env/metrics_collector_vip_port}}'
@@ -105,7 +108,7 @@ def get_tokens():
           EXECUTABLE_SEARCH_PATHS, NN_HTTPS_ADDRESS_KEY, SMOKEUSER_KEY,
           KERBEROS_KEYTAB, KERBEROS_PRINCIPAL, SECURITY_ENABLED_KEY,
           METRICS_COLLECTOR_VIP_HOST_KEY, METRICS_COLLECTOR_VIP_PORT_KEY,
-          METRICS_COLLECTOR_WEBAPP_ADDRESS_KEY)
+          METRICS_COLLECTOR_WEBAPP_ADDRESS_KEY, AMS_HTTP_POLICY)
 
 def execute(configurations={}, parameters={}, host_name=None):
   """
@@ -310,9 +313,14 @@ def execute(configurations={}, parameters={}, host_name=None):
 
   encoded_get_metrics_parameters = urllib.urlencode(get_metrics_parameters)
 
+  ams_monitor_conf_dir = "/etc/ambari-metrics-monitor/conf"
+  metric_truststore_ca_certs='ca.pem'
+  ca_certs = os.path.join(ams_monitor_conf_dir,
+                          metric_truststore_ca_certs)
+  metric_collector_https_enabled = str(configurations[AMS_HTTP_POLICY]) == "HTTPS_ONLY"
+
   try:
-    conn = httplib.HTTPConnection(collector_host, int(collector_port),
-                                  timeout=connection_timeout)
+    conn = network.get_http_connection(collector_host, int(collector_port), metric_collector_https_enabled, ca_certs)
     conn.request("GET", AMS_METRICS_GET_URL % encoded_get_metrics_parameters)
     response = conn.getresponse()
     data = response.read()

http://git-wip-us.apache.org/repos/asf/ambari/blob/fe1704e1/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/alerts/alert_metrics_deviation.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/alerts/alert_metrics_deviation.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/alerts/alert_metrics_deviation.py
index 8a06f56..bc2102a 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/alerts/alert_metrics_deviation.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/alerts/alert_metrics_deviation.py
@@ -24,6 +24,8 @@ import logging
 import urllib
 import time
 import urllib2
+import os
+import ambari_commons.network as network
 
 from resource_management import Environment
 from ambari_commons.aggregate_functions import sample_standard_deviation, mean
@@ -55,6 +57,7 @@ SECURITY_ENABLED_KEY = '{{cluster-env/security_enabled}}'
 SMOKEUSER_KEY = '{{cluster-env/smokeuser}}'
 EXECUTABLE_SEARCH_PATHS = '{{kerberos-env/executable_search_paths}}'
 
+AMS_HTTP_POLICY = '{{ams-site/timeline.metrics.service.http.policy}}'
 METRICS_COLLECTOR_WEBAPP_ADDRESS_KEY = '{{ams-site/timeline.metrics.service.webapp.address}}'
 METRICS_COLLECTOR_VIP_HOST_KEY = '{{cluster-env/metrics_collector_vip_host}}'
 METRICS_COLLECTOR_VIP_PORT_KEY = '{{cluster-env/metrics_collector_vip_port}}'
@@ -105,7 +108,7 @@ def get_tokens():
           EXECUTABLE_SEARCH_PATHS, NN_HTTPS_ADDRESS_KEY, SMOKEUSER_KEY,
           KERBEROS_KEYTAB, KERBEROS_PRINCIPAL, SECURITY_ENABLED_KEY,
           METRICS_COLLECTOR_VIP_HOST_KEY, METRICS_COLLECTOR_VIP_PORT_KEY,
-          METRICS_COLLECTOR_WEBAPP_ADDRESS_KEY)
+          METRICS_COLLECTOR_WEBAPP_ADDRESS_KEY, AMS_HTTP_POLICY)
 
 def execute(configurations={}, parameters={}, host_name=None):
   """
@@ -310,9 +313,14 @@ def execute(configurations={}, parameters={}, host_name=None):
 
   encoded_get_metrics_parameters = urllib.urlencode(get_metrics_parameters)
 
+  ams_monitor_conf_dir = "/etc/ambari-metrics-monitor/conf"
+  metric_truststore_ca_certs='ca.pem'
+  ca_certs = os.path.join(ams_monitor_conf_dir,
+                          metric_truststore_ca_certs)
+  metric_collector_https_enabled = str(configurations[AMS_HTTP_POLICY]) == "HTTPS_ONLY"
+
   try:
-    conn = httplib.HTTPConnection(collector_host, int(collector_port),
-                                  timeout=connection_timeout)
+    conn = network.get_http_connection(collector_host, int(collector_port), metric_collector_https_enabled, ca_certs)
     conn.request("GET", AMS_METRICS_GET_URL % encoded_get_metrics_parameters)
     response = conn.getresponse()
     data = response.read()

http://git-wip-us.apache.org/repos/asf/ambari/blob/fe1704e1/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_alert_metrics_deviation.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_alert_metrics_deviation.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_alert_metrics_deviation.py
index 1e35e6f..09e8886 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_alert_metrics_deviation.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_alert_metrics_deviation.py
@@ -81,6 +81,7 @@ class TestAlertMetricsDeviation(RMFTestCase):
       '{{hdfs-site/dfs.namenode.https-address}}': 'c6401.ambari.apache.org:50470',
       '{{hdfs-site/dfs.http.policy}}': 'HTTP_ONLY',
       '{{ams-site/timeline.metrics.service.webapp.address}}': '0.0.0.0:6188',
+      '{{ams-site/timeline.metrics.service.http.policy}}' : 'HTTP_ONLY',
       '{{hdfs-site/dfs.namenode.http-address}}': 'c6401.ambari.apache.org:50070',
       '{{cluster-env/security_enabled}}': 'false',
       '{{cluster-env/smokeuser}}': 'ambari-qa',
@@ -105,6 +106,7 @@ class TestAlertMetricsDeviation(RMFTestCase):
       '{{hdfs-site/dfs.namenode.https-address}}': 'c6401.ambari.apache.org:50470',
       '{{hdfs-site/dfs.http.policy}}': 'HTTP_ONLY',
       '{{ams-site/timeline.metrics.service.webapp.address}}': '0.0.0.0:6188',
+      '{{ams-site/timeline.metrics.service.http.policy}}' : 'HTTP_ONLY',
       '{{hdfs-site/dfs.namenode.http-address}}': 'c6401.ambari.apache.org:50070',
       '{{cluster-env/security_enabled}}': 'false',
       '{{cluster-env/smokeuser}}': 'ambari-qa',


[17/50] [abbrv] ambari git commit: AMBARI-19947. To make sure LLAP preemption happens correctly, should enable priority-utilization.underutilized-preemption.enabled=true when YARN preemption is enabled.(vbrodetskyi)

Posted by nc...@apache.org.
AMBARI-19947. To make sure LLAP preemption happens correctly, should enable priority-utilization.underutilized-preemption.enabled=true when YARN preemption is enabled.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/59545f7f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/59545f7f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/59545f7f

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 59545f7fde971c352c7f7f44a19f436887fcdfe7
Parents: b15b606
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Fri Feb 10 14:41:12 2017 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Fri Feb 10 14:41:12 2017 +0200

----------------------------------------------------------------------
 .../YARN/3.0.0.3.0/service_advisor.py                | 15 ++++++++++++++-
 .../2.6/services/YARN/configuration/yarn-site.xml    | 12 ++++++++++++
 .../stacks/HDP/2.6/services/stack_advisor.py         | 12 ++++++++++++
 .../3.0/services/YARN/configuration/yarn-site.xml    | 13 +++++++++++++
 4 files changed, 51 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/59545f7f/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
index 30dd474..aecf1e3 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
@@ -130,6 +130,7 @@ class YARNServiceAdvisor(service_advisor.ServiceAdvisor):
     recommender.recommendYARNConfigurationsFromHDP22(configurations, clusterData, services, hosts)
     recommender.recommendYARNConfigurationsFromHDP23(configurations, clusterData, services, hosts)
     recommender.recommendYARNConfigurationsFromHDP25(configurations, clusterData, services, hosts)
+    recommender.recommendYARNConfigurationsFromHDP26(configurations, clusterData, services, hosts)
 
   def getServiceConfigurationsValidationItems(self, configurations, recommendedDefaults, services, hosts):
     """
@@ -435,6 +436,18 @@ class YARNRecommender(service_advisor.ServiceAdvisor):
     putYarnSiteProperty('yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes', ",".join(timeline_plugin_classes_values))
     putYarnSiteProperty('yarn.timeline-service.entity-group-fs-store.group-id-plugin-classpath', ":".join(timeline_plugin_classpath_values))
 
+
+  def recommendYARNConfigurationsFromHDP26(self, configurations, clusterData, services, hosts):
+    putYarnSiteProperty = self.putProperty(configurations, "yarn-site", services)
+
+    if "yarn-site" in services["configurations"] and \
+                    "yarn.resourcemanager.scheduler.monitor.enable" in services["configurations"]["yarn-site"]["properties"]:
+      scheduler_monitor_enabled = services["configurations"]["yarn-site"]["properties"]["yarn.resourcemanager.scheduler.monitor.enable"]
+      if scheduler_monitor_enabled.lower() == 'true':
+        putYarnSiteProperty('yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled', "true")
+      else:
+        putYarnSiteProperty('yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled', "false")
+
   #region LLAP
   def updateLlapConfigs(self, configurations, services, hosts, llap_queue_name):
     """
@@ -1787,4 +1800,4 @@ class MAPREDUCE2Validator(service_advisor.ServiceAdvisor):
       if yarnAppMapreduceAmCommandOpts > yarnAppMapreduceAmResourceMb:
         validationItems.append({"config-name": 'yarn.app.mapreduce.am.command-opts', "item": self.getWarnItem("yarn.app.mapreduce.am.command-opts Xmx should be less than yarn.app.mapreduce.am.resource.mb ({0})".format(yarnAppMapreduceAmResourceMb))})
 
-    return self.toConfigurationValidationProblems(validationItems, "mapred-site")
\ No newline at end of file
+    return self.toConfigurationValidationProblems(validationItems, "mapred-site")

http://git-wip-us.apache.org/repos/asf/ambari/blob/59545f7f/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
index 58b528e..70a2cbe 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
@@ -23,4 +23,16 @@
     <description>When HA is enabled, the class to be used by Clients, AMs and NMs to failover to the Active RM. It should extend org.apache.hadoop.yarn.client.RMFailoverProxyProvider</description>
     <on-ambari-upgrade add="false"/>
   </property>
+  <property>
+    <name>yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled</name>
+    <value>false</value>
+    <description>This property needs to be set to allow preemption to happen in a fragmented cluster.</description>
+    <depends-on>
+      <property>
+        <type>yarn-site</type>
+        <name>yarn.resourcemanager.scheduler.monitor.enable</name>
+      </property>
+    </depends-on>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/59545f7f/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
index e6ebd6f..969c3dd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
@@ -138,6 +138,18 @@ class HDP26StackAdvisor(HDP25StackAdvisor):
         elif superset_database_type == "postgresql":
             putSupersetProperty("SUPERSET_DATABASE_PORT", "5432")
 
+  def recommendYARNConfigurations(self, configurations, clusterData, services, hosts):
+    super(HDP26StackAdvisor, self).recommendYARNConfigurations(configurations, clusterData, services, hosts)
+    putYarnSiteProperty = self.putProperty(configurations, "yarn-site", services)
+
+    if "yarn-site" in services["configurations"] and \
+                    "yarn.resourcemanager.scheduler.monitor.enable" in services["configurations"]["yarn-site"]["properties"]:
+      scheduler_monitor_enabled = services["configurations"]["yarn-site"]["properties"]["yarn.resourcemanager.scheduler.monitor.enable"]
+      if scheduler_monitor_enabled.lower() == 'true':
+        putYarnSiteProperty('yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled', "true")
+      else:
+        putYarnSiteProperty('yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled', "false")
+
   def getMetadataConnectionString(self, database_type):
       driverDict = {
           'mysql': 'jdbc:mysql://{0}:{2}/{1}?createDatabaseIfNotExist=true',

http://git-wip-us.apache.org/repos/asf/ambari/blob/59545f7f/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml
index 0f46d75..01551c4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration/yarn-site.xml
@@ -25,6 +25,19 @@
     <on-ambari-upgrade add="false"/>
   </property>
 
+  <property>
+    <name>yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled</name>
+    <value>false</value>
+    <description>This property needs to be set to allow preemption to happen in a fragmented cluster.</description>
+    <depends-on>
+       <property>
+          <type>yarn-site</type>
+          <name>yarn.resourcemanager.scheduler.monitor.enable</name>
+       </property>
+    </depends-on>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
   <!-- These configs were inherited from HDP 2.5 -->
   <property>
     <name>yarn.nodemanager.aux-services.spark2_shuffle.classpath</name>


[09/50] [abbrv] ambari git commit: AMBARI-19935. Details of multi-condition Ranger Access policy are not visible in Hive View - Table Authorizations column. (dipayanb)

Posted by nc...@apache.org.
AMBARI-19935. Details of multi-condition Ranger Access policy are not visible in Hive View - Table Authorizations column. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2ce10423
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2ce10423
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2ce10423

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 2ce10423190f999b794140a270601a62dc6b7f95
Parents: bc80665
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Fri Feb 10 14:59:33 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Fri Feb 10 15:00:03 2017 +0530

----------------------------------------------------------------------
 .../resources/system/ranger/RangerService.java  | 32 ++++++++++-----
 .../databases/database/tables/table/auth.hbs    | 41 ++++++++++++--------
 2 files changed, 47 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2ce10423/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/ranger/RangerService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/ranger/RangerService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/ranger/RangerService.java
index 9debe42..d300d9a 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/ranger/RangerService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/ranger/RangerService.java
@@ -137,8 +137,9 @@ public class RangerService {
     JSONArray policyItems = (JSONArray) policyJson.get("policyItems");
     Policy policy = new Policy(name);
 
-    if (policyItems.size() > 0) {
-      JSONObject policyItem = (JSONObject) policyItems.get(0);
+    for(Object item: policyItems) {
+      PolicyCondition condition = new PolicyCondition();
+      JSONObject policyItem = (JSONObject) item;
       JSONArray usersJson = (JSONArray) policyItem.get("users");
       JSONArray groupsJson = (JSONArray) policyItem.get("groups");
       JSONArray accesses = (JSONArray) policyItem.get("accesses");
@@ -148,19 +149,20 @@ public class RangerService {
         JSONObject access = (JSONObject) accessJson;
         Boolean isAllowed = (Boolean) access.get("isAllowed");
         if (isAllowed) {
-          policy.addAccess((String) access.get("type"));
+          condition.addAccess((String) access.get("type"));
         }
       }
 
       for (Object user : usersJson) {
-        policy.addUser((String) user);
+        condition.addUser((String) user);
       }
 
       for (Object group : groupsJson) {
-        policy.addGroup((String) group);
+        condition.addGroup((String) group);
       }
-    }
 
+      policy.addCondition(condition);
+    }
 
     return policy;
   }
@@ -266,9 +268,7 @@ public class RangerService {
    */
   public static class Policy {
     private String name;
-    private List<String> users = new ArrayList<>();
-    private List<String> groups = new ArrayList<>();
-    private List<String> accesses = new ArrayList<>();
+    private List<PolicyCondition> conditions = new ArrayList<>();
 
     public Policy(String name) {
       this.name = name;
@@ -282,6 +282,20 @@ public class RangerService {
       this.name = name;
     }
 
+    public List<PolicyCondition> getConditions() {
+      return conditions;
+    }
+
+    public void addCondition(PolicyCondition condition) {
+      this.conditions.add(condition);
+    }
+  }
+
+  public static class PolicyCondition {
+    private List<String> users = new ArrayList<>();
+    private List<String> groups = new ArrayList<>();
+    private List<String> accesses = new ArrayList<>();
+
     public List<String> getUsers() {
       return users;
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2ce10423/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/auth.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/auth.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/auth.hbs
index 416f12f..e00eeb8 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/auth.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/auth.hbs
@@ -25,7 +25,7 @@
   <table class="table table-bordered table-hover">
     <thead>
     <tr>
-      <th width="20%">POLICY NAME</th>
+      <th width="20%"></th>
       <th width="25%">USERS</th>
       <th width="25%">GROUPS</th>
       <th width="30%">ACCESS</th>
@@ -34,23 +34,30 @@
     <tbody>
     {{#each model.policies as |policy|}}
       <tr>
-        <td>{{policy.name}}</td>
-        <td>
-          {{#each policy.users as |user|}}
-            <span class="label label-success">{{user}}</span>
-          {{/each}}
-        </td>
-        <td>
-          {{#each policy.groups as |group|}}
-            <span class="label label-success">{{group}}</span>
-          {{/each}}
-        </td>
-        <td>
-          {{#each policy.accesses as |access|}}
-            <span class="label label-success">{{access}}</span>
-          {{/each}}
-        </td>
+        <td colspan="4">Policy Name: <span class="text-primary"><strong>{{policy.name}}</strong></span></td>
       </tr>
+      {{#each policy.conditions as |condition index|}}
+        <tr>
+          <td>{{#if (eq index 0)}}<strong>Conditions:</strong>{{/if}}</td>
+          <td>
+            {{#each condition.users as |user|}}
+              <span class="label label-success">{{user}}</span>
+            {{/each}}
+          </td>
+          <td>
+            {{#each condition.groups as |group|}}
+              <span class="label label-success">{{group}}</span>
+            {{/each}}
+          </td>
+          <td>
+            {{#each condition.accesses as |access|}}
+              <span class="label label-success">{{access}}</span>
+            {{/each}}
+          </td>
+        </tr>
+      {{/each}}
+
+
     {{/each}}
     </tbody>
   </table>


[34/50] [abbrv] ambari git commit: AMBARI-19931. HiveView2.0: DB panel shows up on Query result full screen (pallavkul)

Posted by nc...@apache.org.
AMBARI-19931. HiveView2.0: DB panel shows up on Query result full screen (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/77b5b16e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/77b5b16e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/77b5b16e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 77b5b16e2bd68ed18e193104b56ad0bb434ee505
Parents: 1d1253a
Author: pallavkul <pa...@gmail.com>
Authored: Sat Feb 11 17:17:12 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Sat Feb 11 17:17:12 2017 +0530

----------------------------------------------------------------------
 .../resources/ui/app/routes/queries/query.js    | 43 +++++++++++++++++---
 .../src/main/resources/ui/app/services/query.js |  2 -
 .../src/main/resources/ui/app/styles/app.scss   |  1 -
 .../ui/app/templates/queries/query.hbs          |  4 +-
 4 files changed, 40 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/77b5b16e/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 72682f5..88202ff 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -24,6 +24,9 @@ export default Ember.Route.extend({
   jobs: Ember.inject.service(),
   savedQueries: Ember.inject.service(),
 
+  isQueryEdidorPaneExpanded: false,
+  isQueryResultPanelExpanded: false,
+
   beforeModel(){
     let existingWorksheets = this.store.peekAll('worksheet');
     existingWorksheets.setEach('selected', false);
@@ -161,7 +164,6 @@ export default Ember.Route.extend({
       this.get('controller.model').set('selectedDb', db);
     },
 
-
     visualExplainQuery(){
       this.get('controller').set('isVisualExplainQuery', true );
       this.send('executeQuery');
@@ -389,15 +391,46 @@ export default Ember.Route.extend({
     },
 
     expandQueryEdidorPanel(){
+
+      if(!this.get('isQueryEdidorPaneExpanded')){
+        this.set('isQueryEdidorPaneExpanded', true);
+      } else {
+        this.set('isQueryEdidorPaneExpanded', false);
+      }
       Ember.$('.query-editor-panel').toggleClass('query-editor-full-width');
       Ember.$('.database-panel').toggleClass("hide");
+
     },
 
     expandQueryResultPanel(){
-      Ember.$('.query-editor-panel').toggleClass('query-editor-full-width');
-      Ember.$('.query-editor-container').toggleClass("hide");
-      Ember.$('.database-panel').toggleClass("hide");
-      this.send('adjustPanelSize');
+
+      if(!this.get('isQueryResultPanelExpanded')){
+
+        if(!this.get('isQueryEdidorPaneExpanded')){
+          Ember.$('.query-editor-container').addClass("hide");
+          Ember.$('.database-panel').addClass("hide");
+          Ember.$('.query-editor-panel').addClass('query-editor-full-width');
+        } else {
+
+          Ember.$('.query-editor-container').addClass("hide");
+        }
+        this.set('isQueryResultPanelExpanded', true);
+
+      } else {
+
+        if(!this.get('isQueryEdidorPaneExpanded')){
+          Ember.$('.query-editor-container').removeClass("hide");
+          Ember.$('.database-panel').removeClass("hide");
+          Ember.$('.query-editor-panel').removeClass('query-editor-full-width');
+        } else {
+
+          Ember.$('.query-editor-container').removeClass("hide");
+
+        }
+        this.set('isQueryResultPanelExpanded', false);
+
+      }
+
     },
 
     adjustPanelSize(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/77b5b16e/contrib/views/hive20/src/main/resources/ui/app/services/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/query.js b/contrib/views/hive20/src/main/resources/ui/app/services/query.js
index b484c74..42d4fb0 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/query.js
@@ -80,6 +80,4 @@ export default Ember.Service.extend({
     });
   }
 
-
-
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/77b5b16e/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index 6469b2e..a9c91c7 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -322,7 +322,6 @@ pre {
 }
 
 .query-result-table {
-  border-top: 1px solid darken($database-search-background, 25%);
   padding-top:10px;
 }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/77b5b16e/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index 84992d7..5b2b5c2 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -54,12 +54,12 @@
         <div>
           <ul class="row nav nav-tabs inverse">
             <li class="editor-result-list active">
-              <a href="javascript:void(0)" class="editor-result-list-anchor" {{action 'showQueryEditorResult' }}>
+              <a href="javascript:void(0)" class="editor-result-list-anchor active" {{action 'showQueryEditorResult' }}>
                 {{fa-icon "file-text-o"}}&nbsp;&nbsp;RESULT
               </a>
             </li>
             <li class="log-list">
-              <a href="javascript:void(0)" class="log-list-anchor active" {{action 'showQueryEditorLog' }}>
+              <a href="javascript:void(0)" class="log-list-anchor" {{action 'showQueryEditorLog' }}>
                 {{fa-icon "list"}}&nbsp;&nbsp;LOG
               </a>
             </li>


[03/50] [abbrv] ambari git commit: AMBARI-19887 : Add AMS and Grafana to PERF cluster (Addendum patch) (avijayan)

Posted by nc...@apache.org.
AMBARI-19887 : Add AMS and Grafana to PERF cluster (Addendum patch) (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7abf4e63
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7abf4e63
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7abf4e63

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 7abf4e63f093d55c793b451d0435fc485757e6c7
Parents: 3c0f3c4
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Thu Feb 9 13:45:33 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Thu Feb 9 13:45:33 2017 -0800

----------------------------------------------------------------------
 .../0.1.0/package/scripts/params.py             |  6 +++---
 .../PERF/1.0/hooks/before-ANY/scripts/hook.py   |  6 ++++--
 .../PERF/1.0/hooks/before-ANY/scripts/params.py |  2 ++
 .../1.0/hooks/before-INSTALL/scripts/hook.py    |  1 +
 .../1.0/hooks/before-RESTART/scripts/hook.py    |  1 +
 .../1.0/services/AMBARI_METRICS/metainfo.xml    | 20 ++++++++++++++++++++
 6 files changed, 31 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7abf4e63/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index dcc26b7..fcfe088 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -67,9 +67,9 @@ if not is_ams_distributed and len(ams_collector_list) > 1:
 
 if 'cluster-env' in config['configurations'] and \
     'metrics_collector_vip_host' in config['configurations']['cluster-env']:
-  metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host']
-else:
-  metric_collector_host = select_metric_collector_hosts_from_hostnames(ams_collector_hosts)
+  ams_collector_hosts = config['configurations']['cluster-env']['metrics_collector_vip_host']
+
+metric_collector_host = select_metric_collector_hosts_from_hostnames(ams_collector_hosts)
 
 random_metric_collector_host = select_metric_collector_hosts_from_hostnames(ams_collector_hosts)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7abf4e63/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/hook.py b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/hook.py
index ef409e2..d707c3b 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/hook.py
@@ -28,8 +28,10 @@ class BeforeAnyHook(Hook):
     env.set_params(params)
 
     #For AMS.
-    setup_users()
-    setup_java()
+    if params.service_name == 'AMBARI_METRICS':
+      setup_users()
+      if params.component_name == 'METRICS_COLLECTOR':
+        setup_java()
 
 if __name__ == "__main__":
   BeforeAnyHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/7abf4e63/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/params.py
index dee9d07..2c2c901 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-ANY/scripts/params.py
@@ -39,4 +39,6 @@ artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 jdk_location = config['hostLevelParams']['jdk_location']
 java_version = expect("/hostLevelParams/java_version", int)
 
+service_name = config["serviceName"]
+component_name = config["role"]
 sudo = AMBARI_SUDO_BINARY
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/7abf4e63/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
index f030cfc..833fdbc 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
@@ -35,6 +35,7 @@ CONF_SELECT_DEST = "/usr/bin/conf-select"
 class BeforeInstallHook(Hook):
 
   def hook(self, env):
+    self.run_custom_hook('before-ANY')
     print "Before Install Hook"
     cache_dir = self.extrakt_var_from_pythonpath(AMBARI_AGENT_CACHE_DIR)
     conf_select = os.path.join(cache_dir, CONF_SELECT_PY)

http://git-wip-us.apache.org/repos/asf/ambari/blob/7abf4e63/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-RESTART/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-RESTART/scripts/hook.py b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-RESTART/scripts/hook.py
index a366129..301288b 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-RESTART/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-RESTART/scripts/hook.py
@@ -22,6 +22,7 @@ from resource_management.libraries.script import Hook
 class BeforeRestartHook(Hook):
 
   def hook(self, env):
+    self.run_custom_hook('before-START')
     print "Before Restart Hook"
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/7abf4e63/ambari-server/src/main/resources/stacks/PERF/1.0/services/AMBARI_METRICS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/AMBARI_METRICS/metainfo.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/services/AMBARI_METRICS/metainfo.xml
index 03a5ab5..a06e61d 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/AMBARI_METRICS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/AMBARI_METRICS/metainfo.xml
@@ -21,6 +21,26 @@
         <service>
             <name>AMBARI_METRICS</name>
             <extends>common-services/AMBARI_METRICS/0.1.0</extends>
+            <components>
+                <component>
+                    <name>METRICS_MONITOR</name>
+                    <displayName>Metrics Monitor</displayName>
+                    <category>SLAVE</category>
+                    <cardinality>0+</cardinality>
+                    <versionAdvertised>false</versionAdvertised>
+                    <commandScript>
+                        <script>scripts/metrics_monitor.py</script>
+                        <scriptType>PYTHON</scriptType>
+                        <timeout>1200</timeout>
+                    </commandScript>
+                    <logs>
+                        <log>
+                            <logId>ams_monitor</logId>
+                            <primary>true</primary>
+                        </log>
+                    </logs>
+                </component>
+            </components>
         </service>
     </services>
 </metainfo>
\ No newline at end of file


[28/50] [abbrv] ambari git commit: AMBARI-19971. HiveServerInteractive. (1). Use the correct value for Node(s) used for running LLAP Daemon(s). (2). Set 'num_llap_nodes' calculated value only for non Ambari managed queue.

Posted by nc...@apache.org.
AMBARI-19971. HiveServerInteractive. (1). Use the correct value for Node(s) used for running LLAP Daemon(s). (2). Set 'num_llap_nodes' calculated value only for non Ambari managed queue.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f4c83843
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f4c83843
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f4c83843

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: f4c83843674764918bd50443705af1fce9852a2b
Parents: 38a17a7
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Fri Feb 10 13:29:41 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Fri Feb 10 15:13:48 2017 -0800

----------------------------------------------------------------------
 .../package/scripts/hive_server_interactive.py  | 12 ++---
 .../0.12.0.2.0/package/scripts/params_linux.py  |  2 +
 .../HIVE/configuration/hive-interactive-env.xml |  4 +-
 .../stacks/HDP/2.5/services/stack_advisor.py    |  9 +++-
 .../HIVE/configuration/hive-interactive-env.xml | 31 ++++++++++++-
 .../stacks/2.5/common/test_stack_advisor.py     | 49 ++++++++++++++------
 6 files changed, 83 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f4c83843/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index 1f1d9a8..2ab48ae 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -281,10 +281,9 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
 
       unique_name = "llap-slider%s" % datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S')
 
-      cmd = format("{stack_root}/current/hive-server2-hive2/bin/hive --service llap --instances {params.num_llap_nodes}"
-                   " --slider-am-container-mb {params.slider_am_container_mb} --size {params.llap_daemon_container_size}m"
-                   " --cache {params.hive_llap_io_mem_size}m --xmx {params.llap_heap_size}m --loglevel {params.llap_log_level}"
-                   " {params.llap_extra_slider_opts} --output {LLAP_PACKAGE_CREATION_PATH}/{unique_name}")
+      cmd = format("{stack_root}/current/hive-server2-hive2/bin/hive --service llap --slider-am-container-mb {params.slider_am_container_mb} "
+                   "--size {params.llap_daemon_container_size}m --cache {params.hive_llap_io_mem_size}m --xmx {params.llap_heap_size}m "
+                   "--loglevel {params.llap_log_level} {params.llap_extra_slider_opts} --output {LLAP_PACKAGE_CREATION_PATH}/{unique_name}")
 
       # Append params that are supported from Hive llap GA version.
       if params.stack_supports_hive_interactive_ga:
@@ -300,11 +299,12 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
         else:
           Logger.info("Setting slider_placement: 4, as llap_daemon_container_size : {0} <= 0.5 * "
                      "YARN NodeManager Memory({1})".format(params.llap_daemon_container_size, params.yarn_nm_mem))
-        cmd += format(" --slider-placement {slider_placement} --skiphadoopversion --skiphbasecp")
+        cmd += format(" --slider-placement {slider_placement} --skiphadoopversion --skiphbasecp --instances {params.num_llap_daemon_running_nodes}")
 
         # Setup the logger for the ga version only
         cmd += format(" --logger {params.llap_logger}")
-
+      else:
+        cmd += format(" --instances {params.num_llap_nodes}")
       if params.security_enabled:
         llap_keytab_splits = params.hive_llap_keytab_file.split("/")
         Logger.debug("llap_keytab_splits : {0}".format(llap_keytab_splits))

http://git-wip-us.apache.org/repos/asf/ambari/blob/f4c83843/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index a5618ca..936b194 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -685,6 +685,8 @@ if has_hive_interactive:
   num_retries_for_checking_llap_status = default('/configurations/hive-interactive-env/num_retries_for_checking_llap_status', 10)
   # Used in LLAP slider package creation
   yarn_nm_mem = config['configurations']['yarn-site']['yarn.nodemanager.resource.memory-mb']
+  if stack_supports_hive_interactive_ga:
+    num_llap_daemon_running_nodes = config['configurations']['hive-interactive-env']['num_llap_nodes_for_llap_daemons']
   num_llap_nodes = config['configurations']['hive-interactive-env']['num_llap_nodes']
   llap_daemon_container_size = config['configurations']['hive-interactive-site']['hive.llap.daemon.yarn.container.mb']
   llap_log_level = config['configurations']['hive-interactive-env']['llap_log_level']

http://git-wip-us.apache.org/repos/asf/ambari/blob/f4c83843/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
index e636e0c..ef9be76 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
@@ -45,8 +45,8 @@
   <property>
     <name>num_llap_nodes</name>
     <value>1</value>
-    <description>The number of Hive LLAP daemons to run.</description>
-    <display-name>Number of LLAP Nodes</display-name>
+    <description>Number of nodes used by Hive's LLAP, which includes nodes running : LLAP daemon, Slider and Tez App Master(s).</description>
+    <display-name>Number of nodes used by Hive's LLAP</display-name>
     <value-attributes>
       <type>int</type>
       <minimum>1</minimum>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f4c83843/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 99b6776..9fe8fc3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -951,10 +951,13 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
       Logger.info("DBG: Calculated '{0}' queue available capacity : {1}, using following: llap_daemon_selected_queue_cap : {2}, "
                     "yarn_min_container_size : {3}".format(llap_daemon_selected_queue_name, total_llap_mem_normalized,
                                                            llap_daemon_selected_queue_cap, yarn_min_container_size))
-      '''Rounding up numNodes so that we run more daemons, and utilitze more CPUs. The rest of the calcaulkations will take care of cutting this down if required'''
+      '''Rounding up numNodes so that we run more daemons, and utilize more CPUs. The rest of the calculations will take care of cutting this down if required'''
       num_llap_nodes_requested = math.ceil(total_llap_mem_normalized / yarn_nm_mem_in_mb_normalized)
       Logger.info("DBG: Calculated 'num_llap_nodes_requested' : {0}, using following: total_llap_mem_normalized : {1}, "
                     "yarn_nm_mem_in_mb_normalized : {2}".format(num_llap_nodes_requested, total_llap_mem_normalized, yarn_nm_mem_in_mb_normalized))
+      # Populate the 'num_llap_nodes_requested' in config 'num_llap_nodes', a read-only config for non-Ambari managed queue case.
+      putHiveInteractiveEnvProperty('num_llap_nodes', num_llap_nodes_requested)
+      Logger.info("Setting config 'num_llap_nodes' as : {0}".format(num_llap_nodes_requested))
       queue_am_fraction_perc = float(self.__getQueueAmFractionFromCapacityScheduler(capacity_scheduler_properties, llap_daemon_selected_queue_name))
       hive_tez_am_cap_available = queue_am_fraction_perc * total_llap_mem_normalized
       Logger.info("DBG: Calculated 'hive_tez_am_cap_available' : {0}, using following: queue_am_fraction_perc : {1}, "
@@ -1182,7 +1185,8 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
       Logger.info("User requested num_llap_nodes : {0}, but used/adjusted value for calculations is : {1}".format(num_llap_nodes_requested, num_llap_nodes))
     else:
       Logger.info("Used num_llap_nodes for calculations : {0}".format(num_llap_nodes_requested))
-    putHiveInteractiveEnvProperty('num_llap_nodes', num_llap_nodes)
+    putHiveInteractiveEnvProperty('num_llap_nodes_for_llap_daemons', num_llap_nodes)
+    Logger.info("Setting config 'num_llap_nodes_for_llap_daemons' as : {0}".format(num_llap_nodes))
 
     llap_container_size = long(llap_daemon_mem_per_node)
     putHiveInteractiveSiteProperty('hive.llap.daemon.yarn.container.mb', llap_container_size)
@@ -1243,6 +1247,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     putHiveInteractiveSitePropertyAttribute('hive.server2.tez.sessions.per.default.queue', "minimum", 1)
     putHiveInteractiveSitePropertyAttribute('hive.server2.tez.sessions.per.default.queue', "maximum", 1)
     putHiveInteractiveEnvProperty('num_llap_nodes', 0)
+    putHiveInteractiveEnvProperty('num_llap_nodes_for_llap_daemons', 0)
     putHiveInteractiveEnvPropertyAttribute('num_llap_nodes', "minimum", 1)
     putHiveInteractiveEnvPropertyAttribute('num_llap_nodes', "maximum", node_manager_cnt)
     putHiveInteractiveSiteProperty('hive.llap.daemon.yarn.container.mb', yarn_min_container_size)

http://git-wip-us.apache.org/repos/asf/ambari/blob/f4c83843/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml
index af656f4..b659205 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml
@@ -62,7 +62,36 @@
     </value-attributes>
     <on-ambari-upgrade add="false"/>
   </property>
-
+  <property>
+    <name>num_llap_nodes_for_llap_daemons</name>
+    <value>1</value>
+    <description>Number of Node(s) on which Hive LLAP daemon runs.</description>
+    <display-name>Number of Node(s) for running Hive LLAP daemon</display-name>
+    <value-attributes>
+      <type>int</type>
+      <minimum>1</minimum>
+      <increment-step>1</increment-step>
+    </value-attributes>
+    <depends-on>
+      <property>
+        <type>hive-interactive-env</type>
+        <name>num_llap_nodes</name>
+      </property>
+      <property>
+        <type>hive-interactive-env</type>
+        <name>enable_hive_interactive</name>
+      </property>
+      <property>
+        <type>hive-interactive-site</type>
+        <name>hive.llap.daemon.queue.name</name>
+      </property>
+      <property>
+        <type>capacity-scheduler</type>
+        <name>yarn.scheduler.capacity.root.queues</name>
+      </property>
+    </depends-on>
+    <on-ambari-upgrade add="true"/>
+  </property>
 
 
   <!-- hive-env.sh -->

http://git-wip-us.apache.org/repos/asf/ambari/blob/f4c83843/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index ec56bad..ef83bca 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -1169,8 +1169,8 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.server2.tez.sessions.per.default.queue'], '1')
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'minimum': '1', 'maximum': '4'})
 
-    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
-
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+    self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '9216')
 
@@ -1369,6 +1369,8 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '3.0'})
 
     self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '9548')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '1')
@@ -1561,7 +1563,9 @@ class TestHDP25StackAdvisor(TestCase):
 
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4'})
 
-    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+    self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '48128')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '1')
@@ -1762,7 +1766,9 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.server2.tez.sessions.per.default.queue'], '1')
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4', 'minimum': '1'})
 
-    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+    self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '38912')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '4')
@@ -1953,14 +1959,15 @@ class TestHDP25StackAdvisor(TestCase):
     configurations = {
     }
 
-
     self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, self.hosts)
-
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.server2.tez.sessions.per.default.queue'], '1')
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'minimum': '1', 'maximum': '4'})
     self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=66.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=66.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.mi
 nimum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=34.0\nyarn.scheduler.capacity.root.llap.capacity=34.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
 
-    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+    self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
+
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '10571')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '3')
@@ -1988,7 +1995,7 @@ class TestHDP25StackAdvisor(TestCase):
   #         Small configuration test with 3 nodes - 'yarn.nodemanager.resource.memory-mb' : 2046 and 'yarn.scheduler.minimum-allocation-mb' : 682, representing a small GCE cluster.
   #
   #         Expected : Configurations values recommended for llap related configs.
-  def test_recommendYARNConfigurations_three_node_manager_llap_configs_updated_2(self):
+  def test_recommendYARNConfigurations_three_node_manager_llap_configs_updated_3(self):
     # 3 node managers and yarn.nodemanager.resource.memory-mb": "12288"
     services = {
       "services": [{
@@ -2158,7 +2165,9 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'minimum': '1', 'maximum': '3.0'})
     self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=66.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=66.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.mi
 nimum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=34.0\nyarn.scheduler.capacity.root.llap.capacity=34.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
 
-    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+    self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '682')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '1')
@@ -2354,7 +2363,8 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=0.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=0.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.mini
 mum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=100.0\nyarn.scheduler.capacity.root.llap.capacity=100.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4'})
 
-    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+    self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '202752')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '3')
@@ -2557,7 +2567,9 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.server2.tez.sessions.per.default.queue'], '1.0')
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4', 'minimum': '1'})
 
-    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 5)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+    self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '36864')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '4')
@@ -2754,7 +2766,9 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.server2.tez.sessions.per.default.queue'], '1')
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4', 'minimum': '1'})
 
-    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+    self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '203918')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '10')
@@ -2947,7 +2961,9 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEqual(configurations['capacity-scheduler']['properties'], {'capacity-scheduler': 'yarn.scheduler.capacity.root.accessible-node-labels=*\nyarn.scheduler.capacity.maximum-am-resource-percent=1\nyarn.scheduler.capacity.node-locality-delay=40\nyarn.scheduler.capacity.root.capacity=100\nyarn.scheduler.capacity.root.default.state=RUNNING\nyarn.scheduler.capacity.root.default.maximum-capacity=0.0\nyarn.scheduler.capacity.root.queues=default,llap\nyarn.scheduler.capacity.maximum-applications=10000\nyarn.scheduler.capacity.root.default.user-limit-factor=1\nyarn.scheduler.capacity.root.acl_administer_queue=*\nyarn.scheduler.capacity.root.default.acl_submit_applications=*\nyarn.scheduler.capacity.root.default.capacity=0.0\nyarn.scheduler.capacity.queue-mappings-override.enable=false\nyarn.scheduler.capacity.root.llap.user-limit-factor=1\nyarn.scheduler.capacity.root.llap.state=RUNNING\nyarn.scheduler.capacity.root.llap.ordering-policy=fifo\nyarn.scheduler.capacity.root.llap.mini
 mum-user-limit-percent=100\nyarn.scheduler.capacity.root.llap.maximum-capacity=100.0\nyarn.scheduler.capacity.root.llap.capacity=100.0\nyarn.scheduler.capacity.root.llap.acl_submit_applications=hive\nyarn.scheduler.capacity.root.llap.acl_administer_queue=hive\nyarn.scheduler.capacity.root.llap.maximum-am-resource-percent=1'})
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4'})
 
-    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+    self.assertTrue('num_llap_nodes' not in configurations['hive-interactive-env']['properties'])
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '202752')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '3')
@@ -3139,6 +3155,8 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4'})
 
     self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '202752')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '3')
@@ -3361,6 +3379,8 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4', 'minimum': '1'})
 
     self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 3)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 3)
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '204288')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '3')
@@ -3580,6 +3600,7 @@ class TestHDP25StackAdvisor(TestCase):
     self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, self.hosts)
 
     self.assertEqual(configurations['hive-interactive-env']['properties']['num_llap_nodes'], '0')
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 0)
 
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '2048')
@@ -3962,6 +3983,8 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEquals(configurations['hive-interactive-site']['property_attributes']['hive.server2.tez.sessions.per.default.queue'], {'maximum': '4', 'minimum': '1'})
 
     self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes'], 1)
+    self.assertTrue(configurations['hive-interactive-env']['properties']['num_llap_nodes_for_llap_daemons'], 1)
+
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.yarn.container.mb'], '200704')
 
     self.assertEqual(configurations['hive-interactive-site']['properties']['hive.llap.daemon.num.executors'], '3')


[13/50] [abbrv] ambari git commit: AMBARI-19418. Support setup queue priority in Ambari - Capacity scheduler view. (Akhil PB via gauravn7)

Posted by nc...@apache.org.
AMBARI-19418. Support setup queue priority in Ambari - Capacity scheduler view. (Akhil PB via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/97994e23
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/97994e23
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/97994e23

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 97994e23f3b6ec71ff9911e43d9c4b135487acba
Parents: 92cf561
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Fri Feb 10 15:46:59 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Fri Feb 10 15:46:59 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/controllers/queue.js  | 103 ++++++++++++++++++-
 .../main/resources/ui/app/controllers/queues.js |   2 +-
 .../src/main/resources/ui/app/models/queue.js   |  11 ++
 .../src/main/resources/ui/app/serializers.js    |   5 +
 .../src/main/resources/ui/app/store.js          |   9 ++
 .../main/resources/ui/app/templates/queue.hbs   |  55 +++++++---
 6 files changed, 171 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/97994e23/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queue.js
----------------------------------------------------------------------
diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queue.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queue.js
index 589dcd0..d3adeb4 100644
--- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queue.js
+++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queue.js
@@ -23,9 +23,9 @@ var _stopState = 'STOPPED';
 
 App.QueueController = Ember.ObjectController.extend({
   needs:['queues','configs'],
+  isPriorityUtilizationSupported: Ember.computed.alias('store.isPriorityUtilizationSupported'),
   isRangerEnabledForYarn : function() {
     var isRanger = this.get('controllers.configs.isRangerEnabledForYarn');
-    console.log("controllers.queue : isRanger : ", isRanger);
     if (isRanger == null || typeof isRanger == 'undefined') {
       return false;
     }
@@ -159,6 +159,16 @@ App.QueueController = Ember.ObjectController.extend({
    */
   orderingPolicyValues: [null,'fifo', 'fair'],
 
+  /**
+   * Possible array of options for ordering policy
+   * @type {Array}
+   */
+  orderingPolicyOptions: [
+    {label: '', value: null},
+    {label: 'FIFO', value: 'fifo'},
+    {label: 'Fair', value: 'fair'}
+  ],
+
 
   // COMPUTED PROPERTIES
 
@@ -344,6 +354,16 @@ App.QueueController = Ember.ObjectController.extend({
     return this.get('content.ordering_policy');
   }.property('content.ordering_policy'),
 
+  currentLeafQueueOP: function(key, val) {
+    if (arguments.length > 1 && this.get('content.isLeafQ')) {
+      if (!this.get('isFairOP')) {
+        this.send('rollbackProp', 'enable_size_based_weight', this.get('content'));
+      }
+      this.set('content.ordering_policy', val || null);
+    }
+    return this.get('content.ordering_policy');
+  }.property('content.ordering_policy'),
+
   /**
    * Does ordering policy is equal to 'fair'
    * @type {Boolean}
@@ -372,6 +392,87 @@ App.QueueController = Ember.ObjectController.extend({
     }.bind(this));
   }.observes('content'),
 
+  /**
+   * Add observer for queue priority.
+   * Sets ordering_policy=priority-utilization to parent queue if children queues have different priorities
+   * Also reset back to original ordering_policy if children have same zero priorities
+   * @method priorityObserver
+   */
+  priorityObserver: function() {
+    if (!this.get('isPriorityUtilizationSupported')) {
+      return;
+    }
+    var parentQueue = this.get('parentQueue');
+    if (parentQueue) {
+      var hasDifferent = this.isChildrenPrioritiesDifferent(parentQueue);
+      if (hasDifferent) {
+        this.setOrderingPolicyConfigs(parentQueue);
+      } else {
+        this.rollbackOrderingPolicyConfigs(parentQueue);
+      }
+    }
+  }.observes('content.priority'),
+
+  /**
+   * Returns boolean if children queues have different priorities for a given queue
+   * @method isChildrenPrioritiesDifferent
+   */
+  isChildrenPrioritiesDifferent: function(queue) {
+    var hasDifferent = false;
+    var children = queue.get('childrenQueues');
+    var priorities = children.map(function(que) {
+      return que.get('priority');
+    });
+    hasDifferent = priorities.some(function(prio) {
+      return prio > 0;
+    });
+    return hasDifferent;
+  },
+
+  /**
+   * Sets queue ordering_policy=priority-utilization when children queues have different priorities
+   * @method setOrderingPolicyConfigs
+   */
+  setOrderingPolicyConfigs: function(queue) {
+    queue.set('ordering_policy', 'priority-utilization');
+  },
+
+  /**
+   * Rollback queue ordering_policy
+   * @method rollbackOrderingPolicyConfigs
+   */
+  rollbackOrderingPolicyConfigs: function(queue) {
+    var changedAttrs = queue.changedAttributes();
+    if (changedAttrs.hasOwnProperty('ordering_policy') && changedAttrs['ordering_policy'][0] !== 'priority-utilization') {
+      this.send('rollbackProp', 'ordering_policy', queue);
+    } else {
+      queue.set('ordering_policy', null);
+    }
+  },
+
+  /**
+   * Add observer to watch queues ordering_policy
+   * In case, if leaf queue has ordering_policy=priority-utilization, set ordering_policy to default fifo, since leaf can not have priority-utilization ordering_policy
+   * Observed when all leaf queues are deleted and parent become leaf with ordering_policy=priority-utilization
+   * Deleting child queues would change parent ordering_policy based on the other children priorities
+   * @method watchQueueOrderingPolicy
+   */
+  watchQueueOrderingPolicy: function() {
+    if (!this.get('isPriorityUtilizationSupported')) {
+      return;
+    }
+    if (this.get('content.isLeafQ') && this.get('content.ordering_policy') == 'priority-utilization') {
+      this.set('content.ordering_policy', 'fifo');
+    }
+    if (!this.get('content.isLeafQ')) {
+      var queue = this.get('content');
+      if (this.isChildrenPrioritiesDifferent(queue)) {
+        this.setOrderingPolicyConfigs(queue);
+      } else {
+        this.rollbackOrderingPolicyConfigs(queue);
+      }
+    }
+  }.observes('content.isLeafQ', 'content', 'content.queuesArray.length'),
 
 
   // METHODS

http://git-wip-us.apache.org/repos/asf/ambari/blob/97994e23/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queues.js
----------------------------------------------------------------------
diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queues.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queues.js
index 2f9f2fe..ed2f60e 100644
--- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queues.js
+++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/controllers/queues.js
@@ -202,7 +202,7 @@ App.QueuesController = Ember.ArrayController.extend({
   },
 
   saveConfigError:function (operation, error) {
-    var response = error.responseJSON;
+    var response = error.responseJSON || {};
     response.simpleMessage = operation.capitalize() + ' failed!';
     this.set('alertMessage',response);
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/97994e23/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js
----------------------------------------------------------------------
diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js
index 9198aa5..e574159 100644
--- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js
+++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js
@@ -255,6 +255,7 @@ App.Queue = DS.Model.extend({
   minimum_user_limit_percent: DS.attr('number', { defaultValue: 100 }),
   maximum_applications: DS.attr('number', { defaultValue: null }),
   maximum_am_resource_percent: DS.attr('number', { defaultValue: null }),
+  priority: DS.attr('number', {defaultValue: 0}),
 
   disable_preemption: DS.attr('string', {defaultValue: ''}),
   isPreemptionInherited: DS.attr('boolean', {defaultValue: true}),
@@ -283,6 +284,16 @@ App.Queue = DS.Model.extend({
     return this.get('_overCapacity') || !Em.isEmpty(this.get('labels').filterBy('overCapacity'));
   }.property('_overCapacity','labels.@each.overCapacity'),
 
+  childrenQueues: function() {
+    var queuesArray = this.get('queuesArray');
+    return this.store.all('queue')
+      .filterBy('depth', this.get('depth') + 1)
+      .filterBy('parentPath', this.get('path'))
+      .filter(function(queue) {
+        return queuesArray.contains(queue.get('name'));
+      });
+  }.property('queues'),
+
   isInvalidMaxCapacity: false,
   isInvalidLabelMaxCapacity: false,
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97994e23/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js
----------------------------------------------------------------------
diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js
index 940d0f8..43d087d 100644
--- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js
+++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/serializers.js
@@ -104,6 +104,7 @@ App.SerializerMixin = Em.Mixin.create({
           ordering_policy:               props[base_path + ".ordering-policy"] || null,
           enable_size_based_weight:      props[base_path + ".ordering-policy.fair.enable-size-based-weight"] || null,
           default_node_label_expression: props[base_path + ".default-node-label-expression"] || null,
+          priority:                      (props[base_path + ".priority"])? +props[base_path + ".priority"] : 0,
           labelsEnabled:                 props.hasOwnProperty(labelsPath),
           disable_preemption:            props[base_path + '.disable_preemption'] || '',
           isPreemptionInherited:         (props[base_path + '.disable_preemption'] !== undefined)?false:true
@@ -233,6 +234,10 @@ App.QueueSerializer = DS.RESTSerializer.extend(App.SerializerMixin,{
       json[this.PREFIX + "." + record.get('path') + ".ordering-policy.fair.enable-size-based-weight"] = record.get('enable_size_based_weight');
     }
 
+    if (this.get('store.isPriorityUtilizationSupported')) {
+      json[this.PREFIX + "." + record.get('path') + ".priority"] = record.get('priority') || 0;
+    }
+
     // do not set property if not set
     var ma = record.get('maximum_applications')||'';
     if (ma) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/97994e23/contrib/views/capacity-scheduler/src/main/resources/ui/app/store.js
----------------------------------------------------------------------
diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/store.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/store.js
index dce00c1..c9e9432 100644
--- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/store.js
+++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/store.js
@@ -168,6 +168,15 @@ App.ApplicationStore = DS.Store.extend({
     return false;
   }.property('stackId'),
 
+  isPriorityUtilizationSupported: function() {
+    var stackId = this.get('stackId');
+    var stackVersion = stackId.substr(stackId.indexOf('-') + 1);
+    if (stackVersion >= 2.6) {
+      return true;
+    }
+    return false;
+  }.property('stackId'),
+
   hasDeletedQueues:Em.computed.notEmpty('deletedQueues.[]'),
 
   deletedQueues:[],

http://git-wip-us.apache.org/repos/asf/ambari/blob/97994e23/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs b/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs
index 87f00d9..69f5b3b 100644
--- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs
+++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/templates/queue.hbs
@@ -363,28 +363,59 @@
                 </div>
                 {{/if}}
             </div>
-            <div class="form-group">
+            {{#if isPriorityUtilizationSupported}}
+              <div class="form-group">
+                 {{tooltip-label
+                  class="col-xs-6 control-label"
+                  label='Priority'
+                  message='The priority of queue, higher means more important. It should be a valid integer value, 0 is default value.'
+                }}
+                {{#if isOperator}}
+                  <div class="col-xs-6 control-value">
+                    {{int-input value=content.priority maxlength=10 class="input-sm input-int"}}
+                    {{#if queueDirtyFilelds.priority}}
+                    <div class="btn-group btn-group-xs" >
+                      <a {{action 'rollbackProp' 'priority' content}} href="#" class="btn btn-default btn-warning"><i class="fa fa-undo"></i></a>
+                    </div>
+                    {{/if}}
+                  </div>
+                {{else}}
+                  <div class="col-xs-6">
+                    <p class="form-control-static">{{content.priority}}</p>
+                  </div>
+                {{/if}}
+              </div>
+            {{/if}}
+            {{#if content.isLeafQ}}
+              <div class="form-group">
                 {{tooltip-label
                   class="col-xs-6 control-label"
-                  label='Ordering policy'
+                  label='Ordering Policy'
                   message='The ordering policy to use for applications scheduled to this queue. <br/> FIFO: Applications get available capacity based on order they are submitted <br/> Fair: Applications will get fair share of capacity, regardless of order submitted'}}
                 {{#if isOperator}}
-                <div class="col-xs-6 control-value input-percent-wrap">
-                  <div>
-                    {{view Ember.Select class="form-control input-sm" content=orderingPolicyValues value=currentOP }}
-                  </div>
-                  {{#if queueDirtyFilelds.ordering_policy}}
+                  <div class="col-xs-6 control-value input-percent-wrap">
+                    <div>
+                      {{view Ember.Select
+                        class="form-control input-sm"
+                        contentBinding="orderingPolicyOptions"
+                        optionValuePath="content.value"
+                        optionLabelPath="content.label"
+                        value=currentLeafQueueOP
+                      }}
+                    </div>
+                    {{#if queueDirtyFilelds.ordering_policy}}
                     <div class="btn-group btn-group-xs" >
                       <a {{action 'rollbackProp' 'ordering_policy' content}} href="#" class="btn btn-default btn-warning"><i class="fa fa-undo"></i></a>
                     </div>
-                  {{/if}}
-                </div>
+                    {{/if}}
+                  </div>
                 {{else}}
-                <div class="col-xs-6">
+                  <div class="col-xs-6">
                     <p class="form-control-static">{{content.ordering_policy}}</p>
-                </div>
+                  </div>
                 {{/if}}
-            </div>
+              </div>
+            {{/if}}
             {{#if isFairOP}}
               <div class="form-group">
                 {{tooltip-label


[39/50] [abbrv] ambari git commit: AMBARI-19970 : AMS graphs are not present on cluster with SSL. (avijayan)

Posted by nc...@apache.org.
AMBARI-19970 : AMS graphs are not present on cluster with SSL. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5d695d83
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5d695d83
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5d695d83

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 5d695d83d8d5f2e8b622757b434133bfaa125dc8
Parents: 6eac0f5
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Sun Feb 12 08:37:22 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Sun Feb 12 08:37:22 2017 -0800

----------------------------------------------------------------------
 .../0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2  | 4 ----
 1 file changed, 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5d695d83/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
index 8c6f86f..8c20f2b 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
@@ -42,19 +42,16 @@ hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.period=30
 hbase.collector.hosts={{ams_collector_hosts}}
 hbase.port={{metric_collector_port}}
-hbase.protocol={{metric_collector_protocol}}
 
 jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 jvm.period=30
 jvm.collector.hosts={{ams_collector_hosts}}
 jvm.port={{metric_collector_port}}
-jvm.protocol={{metric_collector_protocol}}
 
 rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 rpc.period=30
 rpc.collector.hosts={{ams_collector_hosts}}
 rpc.port={{metric_collector_port}}
-rpc.protocol={{metric_collector_protocol}}
 
 *.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
 *.sink.timeline.slave.host.name={{hostname}}
@@ -63,7 +60,6 @@ hbase.sink.timeline.period={{metrics_collection_period}}
 hbase.sink.timeline.sendInterval={{metrics_report_interval}}000
 hbase.sink.timeline.collector.hosts={{ams_collector_hosts}}
 hbase.sink.timeline.port={{metric_collector_port}}
-hbase.sink.timeline.protocol={{metric_collector_protocol}}
 hbase.sink.timeline.serviceName-prefix=ams
 
 # HTTPS properties


[06/50] [abbrv] ambari git commit: AMBARI-19928. Solr grafana dashboards. (Willy Solaligue via yusaku)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/00ed4159/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-solr-cores-dashboard.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-solr-cores-dashboard.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-solr-cores-dashboard.json
new file mode 100644
index 0000000..4529092
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-solr-cores-dashboard.json
@@ -0,0 +1,3162 @@
+{
+  "id": null,
+  "title": "Solr - Cores",
+  "originalTitle": "Solr - Cores",
+  "tags": ["solr"],
+  "style": "dark",
+  "timezone": "browser",
+  "editable": true,
+  "hideControls": false,
+  "sharedCrosshair": false,
+  "rows": [
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "content": "<h4 align=\"center\">Solr Cores Dashboard</h4>",
+          "editable": true,
+          "error": false,
+          "height": "25px",
+          "id": 2,
+          "isNew": true,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "style": {},
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "title": "Row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)",
+            "thresholdLine": false
+          },
+          "height": "",
+          "id": 4,
+          "isNew": true,
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "adds",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "solr.admin.mbeans.updateHandler.adds",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Indexing Throughput",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "none",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 5,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "deletesById",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.updateHandler.deletesById",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "deletesByQuery",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.updateHandler.deletesByQuery",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Deletes",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "none",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 6,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "errors",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.updateHandler.errors",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Errors",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "none",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 7,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "docsPending",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.updateHandler.docsPending",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Pending Documents",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Indexing"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "height": "350px",
+          "id": 3,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.requests",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "avgRequestsPerSecond",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.avgRequestsPerSecond",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "5minRateReqsPerSecond",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.5minRateReqsPerSecond",
+              "precision": "default",
+              "refId": "C",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "15minRateReqsPerSecond",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.15minRateReqsPerSecond",
+              "precision": "default",
+              "refId": "D",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Select Handler Requests",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "none",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "height": "350px",
+          "id": 8,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.avgTimePerRequest",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "medianRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.medianRequestTime",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "75thPcRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.75thPcRequestTime",
+              "precision": "default",
+              "refId": "C",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "95thPcRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.95thPcRequestTime",
+              "precision": "default",
+              "refId": "D",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "99thPcRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.99thPcRequestTime",
+              "precision": "default",
+              "refId": "E",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "999thPcRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.999thPcRequestTime",
+              "precision": "default",
+              "refId": "F",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Time Per Select Handler Request",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "height": "350px",
+          "id": 9,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "browse.requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.browse.requests",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "export.requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.export.requests",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "get.requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.get.requests",
+              "precision": "default",
+              "refId": "C",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "query.requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.query.requests",
+              "precision": "default",
+              "refId": "D",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Other Handler Requests",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "height": "350px",
+          "id": 10,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "browse.avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.browse.avgTimePerRequest",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "export.avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.export.avgTimePerRequest",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "get.avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.get.avgTimePerRequest",
+              "precision": "default",
+              "refId": "C",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "query.avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.query.avgTimePerRequest",
+              "precision": "default",
+              "refId": "D",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Average Time for Other Handlers",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Queries"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 11,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "hitratio",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.filterCache.hitratio",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Filter Cache Hit Ratio",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 12,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "size",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.filterCache.size",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Filter Cache Size",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 13,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "warmupTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.filterCache.warmupTime",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Filter Cache Warmup Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 14,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "hitratio",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.documentCache.hitratio",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Document Cache Hit Ratio",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 15,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "size",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.documentCache.size",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Document Cache Size",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 16,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "warmupTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.documentCache.warmupTime",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Document Cache Warmup Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 17,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "hitratio",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.queryResultCache.hitratio",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Query Result Cache Hit Ratio",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 18,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "size",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.queryResultCache.size",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Query Result Cache Size",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 19,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "warmupTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.queryResultCache.warmupTime",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Query Result Cache Warmup Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Caches"
+    }
+  ],
+  "time": {
+    "from": "now-30m",
+    "to": "now"
+  },
+  "timepicker": {
+    "now": true,
+    "refresh_intervals": [
+      "5s",
+      "10s",
+      "30s",
+      "1m",
+      "5m",
+      "15m",
+      "30m",
+      "1h",
+      "2h",
+      "1d"
+    ],
+    "time_options": [
+      "5m",
+      "15m",
+      "1h",
+      "6h",
+      "12h",
+      "24h",
+      "2d",
+      "7d",
+      "30d"
+    ]
+  },
+  "templating": {
+    "list": [
+      {
+        "allFormat": "glob",
+        "current": {
+          "text": "solr-core-app",
+          "value": "solr-core-app"
+        },
+        "datasource": null,
+        "includeAll": false,
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "components",
+        "options": [
+          {
+            "selected": true,
+            "text": "solr-core-app",
+            "value": "solr-core-app"
+          }
+        ],
+        "query": "solr-core-app",
+        "refresh": false,
+        "type": "custom"
+      },
+      {
+        "allFormat": "glob",
+        "current": {
+        },
+        "datasource": null,
+        "includeAll": false,
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "hosts",
+        "options": [
+
+        ],
+        "query": "hosts",
+        "refresh": true,
+        "regex": "",
+        "type": "query"
+      }
+    ]
+  },
+  "annotations": {
+    "list": []
+  },
+  "refresh": false,
+  "schemaVersion": 8,
+  "version": 20,
+  "links": []
+}
+{
+  "id": null,
+  "title": "Solr Cores Dashboard",
+  "originalTitle": "Solr Cores Dashboard",
+  "tags": ["solr"],
+  "style": "dark",
+  "timezone": "browser",
+  "editable": true,
+  "hideControls": false,
+  "sharedCrosshair": false,
+  "rows": [
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "content": "<h4 align=\"center\">Solr Cores Dashboard</h4>",
+          "editable": true,
+          "error": false,
+          "height": "25px",
+          "id": 2,
+          "isNew": true,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "style": {},
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "title": "Row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)",
+            "thresholdLine": false
+          },
+          "height": "",
+          "id": 4,
+          "isNew": true,
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "adds",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "solr.admin.mbeans.updateHandler.adds",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Indexing Throughput",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "none",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 5,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "deletesById",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.updateHandler.deletesById",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "deletesByQuery",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.updateHandler.deletesByQuery",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Deletes",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "none",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 6,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "errors",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.updateHandler.errors",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Errors",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "none",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 7,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "docsPending",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.updateHandler.docsPending",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Pending Documents",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Indexing"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "height": "350px",
+          "id": 3,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.requests",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "avgRequestsPerSecond",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.avgRequestsPerSecond",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "5minRateReqsPerSecond",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.5minRateReqsPerSecond",
+              "precision": "default",
+              "refId": "C",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "15minRateReqsPerSecond",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.15minRateReqsPerSecond",
+              "precision": "default",
+              "refId": "D",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Select Handler Requests",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "none",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "height": "350px",
+          "id": 8,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.avgTimePerRequest",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "medianRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.medianRequestTime",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "75thPcRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.75thPcRequestTime",
+              "precision": "default",
+              "refId": "C",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "95thPcRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.95thPcRequestTime",
+              "precision": "default",
+              "refId": "D",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "99thPcRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.99thPcRequestTime",
+              "precision": "default",
+              "refId": "E",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "999thPcRequestTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.select.999thPcRequestTime",
+              "precision": "default",
+              "refId": "F",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Time Per Select Handler Request",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "height": "350px",
+          "id": 9,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "browse.requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.browse.requests",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "export.requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.export.requests",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "get.requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.get.requests",
+              "precision": "default",
+              "refId": "C",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "query.requests",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.query.requests",
+              "precision": "default",
+              "refId": "D",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Other Handler Requests",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "height": "350px",
+          "id": 10,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "browse.avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.browse.avgTimePerRequest",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "export.avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.export.avgTimePerRequest",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "get.avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.get.avgTimePerRequest",
+              "precision": "default",
+              "refId": "C",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            },
+            {
+              "aggregator": "none",
+              "alias": "query.avgTimePerRequest",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.queryHandler.query.avgTimePerRequest",
+              "precision": "default",
+              "refId": "D",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Average Time for Other Handlers",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Queries"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 11,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "hitratio",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.filterCache.hitratio",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Filter Cache Hit Ratio",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 12,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "size",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.filterCache.size",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Filter Cache Size",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 13,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "warmupTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.filterCache.warmupTime",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Filter Cache Warmup Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 14,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "hitratio",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.documentCache.hitratio",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Document Cache Hit Ratio",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 15,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "size",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.documentCache.size",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Document Cache Size",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 16,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "warmupTime",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.documentCache.warmupTime",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Document Cache Warmup Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 17,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "hitratio",
+              "app": "solr-core-app",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "solr.admin.mbeans.cache.queryResultCache.hitratio",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Query Result Cache Hit Ratio",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {
+          },
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 18,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "none",
+              "alias": "size",
+              "app": "solr-core-app",
+              "downsampleAggregator": "a

<TRUNCATED>

[40/50] [abbrv] ambari git commit: AMBARI-19452 : Metrics of flume shows same values for all channels, sinks or sources in host. (wangjianfei via avijayan)

Posted by nc...@apache.org.
AMBARI-19452 : Metrics of Flume show the same values for all channels, sinks or sources on a host. (wangjianfei via avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3a951927
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3a951927
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3a951927

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 3a9519279e3f3b9ba113fb1bc1d3241e6bd9f2a8
Parents: 5d695d8
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Sun Feb 12 14:23:54 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Sun Feb 12 14:23:54 2017 -0800

----------------------------------------------------------------------
 .../controller/metrics/timeline/AMSPropertyProvider.java     | 8 ++++++++
 1 file changed, 8 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3a951927/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
index f194f8e..759f595 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
@@ -441,6 +441,14 @@ public abstract class AMSPropertyProvider extends MetricsPropertyProvider {
                   } else {
                     propertyId = substituteArgument(propertyId, "$1", metric.getInstanceId());
                   }
+                }else {
+                  if(metric.getInstanceId() != null){
+                    //instanceId "CHANNEL.ch1"
+                    String instanceId = metric.getInstanceId();
+                    instanceId = instanceId.matches("^\\w+\\..+$") ? instanceId.split("\\.")[1]:"";
+                    //propertyId "metrics/flume/flume/CHANNEL/ch1/[ChannelCapacity]"
+                    if(!propertyId.contains(instanceId)) continue;
+                  }
                 }
                 Object value = getValue(metric, temporalInfo);
                 if (value != null && !containsArguments(propertyId)) {


[32/50] [abbrv] ambari git commit: AMBARI-19958. Hive View 2.0 - deleting a saved query is buggy (pallavkul)

Posted by nc...@apache.org.
AMBARI-19958. Hive View 2.0 - deleting a saved query is buggy (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/23642958
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/23642958
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/23642958

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 23642958208fe6b3148ac157753edb13b6d31347
Parents: d96d209
Author: pallavkul <pa...@gmail.com>
Authored: Sat Feb 11 16:58:22 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Sat Feb 11 16:58:22 2017 +0530

----------------------------------------------------------------------
 .../resources/ui/app/adapters/saved-query.js    |  2 +-
 .../resources/ui/app/components/query-editor.js |  2 +-
 .../resources/ui/app/controllers/saved-query.js | 22 +++++++++
 .../resources/ui/app/routes/queries/query.js    | 11 +++--
 .../resources/ui/app/routes/savedqueries.js     | 50 +++++++++++++++-----
 .../resources/ui/app/services/saved-queries.js  |  5 +-
 .../ui/app/templates/queries/query.hbs          |  2 +-
 .../resources/ui/app/templates/savedqueries.hbs |  8 ++--
 8 files changed, 75 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/23642958/contrib/views/hive20/src/main/resources/ui/app/adapters/saved-query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/saved-query.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/saved-query.js
index 5ee757b..a25adc7 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/adapters/saved-query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/saved-query.js
@@ -21,6 +21,6 @@ import ApplicationAdapter from './application';
 
 export default ApplicationAdapter.extend({
   buildURL(){
-    return this._super(...arguments).replace('/resources','') + '/savedQueries/';
+    return this._super(...arguments).replace('/resources','');
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/23642958/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js b/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
index 7bfe223..3175a35 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
@@ -101,7 +101,7 @@ export default Ember.Component.extend({
       }
     }
 
-    this.sendAction('updateQuery');
+    this.sendAction('updateQuery', query);
 
 
   }.observes('query'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/23642958/contrib/views/hive20/src/main/resources/ui/app/controllers/saved-query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/controllers/saved-query.js b/contrib/views/hive20/src/main/resources/ui/app/controllers/saved-query.js
new file mode 100644
index 0000000..dc99fd1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/controllers/saved-query.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Controller.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/23642958/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index dcf27b4..4f60229 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -114,7 +114,6 @@ export default Ember.Route.extend({
     controller.set('isVisualExplainQuery', false);
     controller.set('visualExplainJson', null);
 
-
   },
 
 
@@ -122,7 +121,7 @@ export default Ember.Route.extend({
     createQuery(udfName, udfClassname, fileResourceName, fileResourcePath){
       let query = "add jar "+ fileResourcePath + ";\ncreate temporary function " + udfName + " as '"+ udfClassname+ "';";
       this.get('controller').set('currentQuery', query);
-      this.get('controller.model').set('currentQuery', query );
+      this.get('controller.model').set('query', query );
     },
 
     changeDbHandler(selectedDBs){
@@ -220,6 +219,7 @@ export default Ember.Route.extend({
 
         self.get('jobs').waitForJobToComplete(data.job.id, 2 * 1000, false)
           .then((status) => {
+
               self.get('controller').set('isJobSuccess', true);
               self.send('getJob', data);
 
@@ -318,8 +318,9 @@ export default Ember.Route.extend({
       });
     },
 
-    updateQuery(){
-      console.log('I am in update query.');
+    updateQuery(query){
+      this.get('controller').set('currentQuery', query);
+      this.get('controller.model').set('query', query);
     },
 
     goNextPage(){
@@ -411,7 +412,7 @@ export default Ember.Route.extend({
       console.log('I am in saveWorksheetModal');
       let newTitle = $('#worksheet-title').val();
 
-      let currentQuery = this.get('controller').get('currentQuery');
+      let currentQuery = this.get('controller.model').get('query');
       let selectedDb = this.get('controller.model').get('selectedDb');
       let owner = this.get('controller.model').get('owner');
       let queryFile = this.get('controller.model').get('queryFile');

http://git-wip-us.apache.org/repos/asf/ambari/blob/23642958/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js b/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
index 7b9cb09..c2a0c8c 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
@@ -23,11 +23,14 @@ export default Ember.Route.extend({
   savedQueries: Ember.inject.service(),
 
   model() {
-    return this.get('savedQueries').getAllQueries();
+    return this.store.findAll('savedQuery').then(savedQueries => savedQueries.toArray());
   },
 
   setupController(controller, model) {
     this._super(...arguments);
+
+    controller.set('savedQuerylist', model);
+
     controller.set('showDeleteSaveQueryModal', false);
     controller.set('selectedSavedQueryId', null);
   },
@@ -39,19 +42,40 @@ export default Ember.Route.extend({
 
     deleteSavedQuery(){
       let queryId = this.get('controller').get('selectedSavedQueryId');
+      let self = this;
 
       console.log('deleteSavedQuery', queryId);
-      this.get('savedQueries').deleteSaveQuery(queryId)
-        .then((data) => {
-          console.log('Deleted saved query.', data);
-          this.get('controller').set('showDeleteSaveQueryModal', false );
-          //$(window).reload();
-        }, (error) => {
-          console.log("Error encountered", error);
+
+      this.get('store').queryRecord('saved-query', { filter: { id: queryId } }, {reload: true}).then(function(record) {
+        record.destroyRecord().then(function(data) {
+          self.send('deleteSavedQueryDeclined');
+          self.send('refreshSavedQueryList');
+        })
+      }, (error) => {
+        console.log('error', error);
+      });
+    },
+
+    refreshSavedQueryList(){
+      this.get('store').findAll('saved-query').then(data => {
+        let savedQueryList = [];
+        data.forEach(x => {
+          let localSavedQuery = {
+            'id': x.get('id'),
+            'dataBase': x.get('dataBase'),
+            'title': x.get('title'),
+            'queryFile': x.get('queryFile'),
+            'owner': x.get('owner'),
+            'shortQuery': x.get('shortQuery')
+          };
+          savedQueryList.pushObject(localSavedQuery);
         });
+
+        this.get('controller').set('savedQuerylist',savedQueryList);
+      })
     },
 
-    deleteSavedQuerypDeclined(){
+    deleteSavedQueryDeclined(){
       this.get('controller').set('selectedSavedQueryId', null);
       this.get('controller').set('showDeleteSaveQueryModal', false );
     },
@@ -85,10 +109,10 @@ export default Ember.Route.extend({
 
       let localWs = {
         id: worksheetId,
-        title: savedQuery.title,
-        query: savedQuery.shortQuery,
-        selectedDb : savedQuery.dataBase,
-        owner: savedQuery.owner,
+        title: savedQuery.get('title'),
+        query: savedQuery.get('shortQuery'),
+        selectedDb : savedQuery.get('dataBase'),
+        owner: savedQuery.get('owner'),
         selected: true
       };
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/23642958/contrib/views/hive20/src/main/resources/ui/app/services/saved-queries.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/saved-queries.js b/contrib/views/hive20/src/main/resources/ui/app/services/saved-queries.js
index 29ef4b5..80a19f3 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/saved-queries.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/saved-queries.js
@@ -30,15 +30,16 @@ export default Ember.Service.extend({
   saveQuery(payload){
     return $.ajax({
       type: "POST",
-      url: this.get('store').adapterFor('saved-query').buildURL(),
+      url: this.get('store').adapterFor('saved-query').buildURL() + '/savedQueries/',
       data: JSON.stringify({savedQuery: payload}) ,
       contentType:"application/json; charset=utf-8",
       dataType:"json",
       headers: {'X-Requested-By': 'ambari'}
     })
   },
+
   deleteSaveQuery(id){
-    let deletURL = this.get('store').adapterFor('saved-query').buildURL() + id;
+    let deletURL = this.get('store').adapterFor('saved-query').buildURL()+ '/savedQueries/' + id;
 
     return $.ajax({
       type: "DELETE",

http://git-wip-us.apache.org/repos/asf/ambari/blob/23642958/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index 2d6c5aa..84992d7 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -25,7 +25,7 @@
         <span class="expand-button" {{action "expandQueryEdidorPanel" }} >
           {{fa-icon "expand"}}
         </span>
-        {{query-editor query=currentQuery }}
+        {{query-editor query=currentQuery updateQuery='updateQuery' }}
       </div>
       <div class="row query-editor-controls">
         <button class="btn btn-success" {{action "executeQuery" }} disabled={{ worksheet.isQueryRunning}}>{{fa-icon "check"}} Execute</button>

http://git-wip-us.apache.org/repos/asf/ambari/blob/23642958/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
index 36dc982..c3d907d 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
@@ -29,7 +29,7 @@
       </tr>
       </thead>
       <tbody>
-      {{#each model.savedQueries as |savedQuery| }}
+      {{#each savedQuerylist as |savedQuery| }}
         <tr>
           <td>{{savedQuery.shortQuery}}</td>
           <td class="break-word">{{savedQuery.title}}</td>
@@ -37,8 +37,8 @@
           <td>{{savedQuery.owner}}</td>
           <td>
               <div class="dropdown">
-                <a class="dropdown-toggle" data-toggle="dropdown">{{fa-icon "cog"}}</a>
-                <ul class="dropdown-menu dropdown-menu-right"">
+                <a class="dropdown-toggle" id="dropdownMenu1121" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">{{fa-icon "cog"}}</a>
+                <ul class="dropdown-menu dropdown-menu-right" aria-labelledby="dropdownMenu">
                   <li><a href="#" {{action "historySavedQuery" savedQuery.id }} class="text-uppercase">{{fa-icon "history"}} History</a></li>
                   <li><a href="#" {{action "openDeleteSavedQueryModal" savedQuery.id}} class="text-uppercase">{{fa-icon "remove"}} Delete</a></li>
                   <li><a href="#" {{action "openAsWorksheet" savedQuery }} class="text-uppercase">{{fa-icon "folder-open-o"}} Open as worksheet</a></li>
@@ -63,7 +63,7 @@
   closable=false
   confirmClass="success"
   confirm="deleteSavedQuery"
-  reject="deleteSavedQuerypDeclined"
+  reject="deleteSavedQueryDeclined"
   }}
 {{/if}}
 


[02/50] [abbrv] ambari git commit: AMBARI-19870. Add Superset as a UI for Druid in HDP (Nishant Bangarwa via smohanty)

Posted by nc...@apache.org.
AMBARI-19870. Add Superset as a UI for Druid in HDP (Nishant Bangarwa via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3c0f3c46
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3c0f3c46
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3c0f3c46

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 3c0f3c46b97f00a24cf0711c43bc24df3ed6df84
Parents: d5755ba
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Thu Feb 9 12:32:17 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Thu Feb 9 12:32:17 2017 -0800

----------------------------------------------------------------------
 .../DRUID/0.9.2/configuration/druid-common.xml  |  10 +-
 .../0.9.2/configuration/druid-superset-env.xml  | 115 ++++++++++++
 .../0.9.2/configuration/druid-superset.xml      | 178 +++++++++++++++++++
 .../common-services/DRUID/0.9.2/metainfo.xml    |  26 ++-
 .../DRUID/0.9.2/package/scripts/params.py       |  45 ++++-
 .../0.9.2/package/scripts/status_params.py      |   2 +
 .../DRUID/0.9.2/package/scripts/superset.py     | 153 ++++++++++++++++
 .../DRUID/0.9.2/package/templates/superset.sh   |  95 ++++++++++
 .../DRUID/0.9.2/quicklinks/quicklinks.json      |  13 ++
 .../DRUID/0.9.2/role_command_order.json         |  18 ++
 .../DRUID/0.9.2/themes/theme.json               |  84 ++++++++-
 .../stacks/HDP/2.6/role_command_order.json      |  19 --
 .../stacks/HDP/2.6/services/DRUID/kerberos.json |  33 ++++
 .../stacks/HDP/2.6/services/stack_advisor.py    |  11 ++
 .../test/python/stacks/2.6/configs/default.json |   3 +
 15 files changed, 774 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml
index a494750..d3b53cd 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml
@@ -132,20 +132,20 @@
   <property>
     <name>database_name</name>
     <value>druid</value>
-    <display-name>Metadata storage database name</display-name>
-    <description>Metadata storage database name</description>
+    <display-name>Druid Metadata storage database name</display-name>
+    <description>Druid Metadata storage database name</description>
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
     <name>metastore_hostname</name>
     <value>localhost</value>
-    <display-name>Metadata storage hostname name</display-name>
-    <description>Metadata storage hostname name</description>
+    <display-name>Metadata storage hostname</display-name>
+    <description>Metadata storage hostname</description>
     <on-ambari-upgrade add="false"/>
   </property>
   <property require-input="true">
     <name>druid.metadata.storage.type</name>
-    <display-name>Metadata storage type</display-name>
+    <display-name>Druid Metadata storage type</display-name>
     <value>derby</value>
     <value-attributes>
       <overridable>false</overridable>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-superset-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-superset-env.xml b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-superset-env.xml
new file mode 100644
index 0000000..61421c0
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-superset-env.xml
@@ -0,0 +1,115 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<configuration>
+  <property>
+    <name>superset_log_dir</name>
+    <value>/var/log/superset</value>
+    <display-name>Superset log dir</display-name>
+    <description>Superset log directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>superset_pid_dir</name>
+    <value>/var/run/superset</value>
+    <display-name>Superset pid dir</display-name>
+    <description>Superset pid directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>superset_admin_user</name>
+    <value>admin</value>
+    <display-name>Superset Admin Username</display-name>
+    <description>
+      Superset Username. This value cannot be modified by Ambari
+      except on initial install. Please make sure the username change in
+      Superset is reflected in Ambari.
+    </description>
+    <value-attributes>
+      <type>db_user</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property require-input="true">
+    <name>superset_admin_password</name>
+    <value/>
+    <property-type>PASSWORD</property-type>
+    <display-name>Superset Admin Password</display-name>
+    <description>
+      Superset password. This value cannot be modified by Ambari
+      except on initial install. Please make sure the password change in
+      Superset is reflected back in Ambari.
+    </description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <type>password</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>superset_admin_firstname</name>
+    <value></value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>superset_admin_lastname</name>
+    <value></value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>superset_admin_email</name>
+    <value></value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>superset-env template</display-name>
+    <value>
+      # Set environment variables here.
+
+      # Superset Home Dir
+      export SUPERSET_CONFIG_DIR={{superset_config_dir}}
+
+      # Superset Log Dir
+      export SUPERSET_LOG_DIR={{superset_log_dir}}
+
+      # Superset Bin Dir
+      export SUPERSET_BIN_DIR={{superset_bin_dir}}
+
+      # Superset PID Dir
+      export SUPERSET_PID_DIR={{superset_pid_dir}}
+
+      # Add superset_config.py to PYTHONPATH
+      export PYTHONPATH=$PYTHONPATH:{{superset_config_dir}}
+
+      export SUPERSET_WEBSERVER_PORT={{superset_webserver_port}}
+      export SUPERSET_WEBSERVER_ADDRESS={{superset_webserver_address}}
+      export SUPERSET_TIMEOUT={{superset_timeout}}
+      export SUPERSET_WORKERS={{superset_workers}}
+    </value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-superset.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-superset.xml b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-superset.xml
new file mode 100644
index 0000000..2fff10a
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-superset.xml
@@ -0,0 +1,178 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+  <property>
+    <name>ROW_LIMIT</name>
+    <value>5000</value>
+    <value-attributes>
+      <type>int</type>
+      <minimum>0</minimum>
+    </value-attributes>
+    <description></description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_WORKERS</name>
+    <value>4</value>
+    <value-attributes>
+      <type>int</type>
+      <minimum>0</minimum>
+    </value-attributes>
+    <description></description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_WEBSERVER_PORT</name>
+    <value>9088</value>
+    <description></description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_WEBSERVER_ADDRESS</name>
+    <value>0.0.0.0</value>
+    <description></description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_TIMEOUT</name>
+    <value>60</value>
+    <description></description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property require-input="true">
+    <name>SUPERSET_DATABASE_TYPE</name>
+    <display-name>Superset Database type</display-name>
+    <value>sqlite</value>
+    <value-attributes>
+      <overridable>false</overridable>
+      <type>value-list</type>
+      <entries>
+        <entry>
+          <value>mysql</value>
+          <label>MYSQL</label>
+        </entry>
+        <entry>
+          <value>sqlite</value>
+          <label>SQLITE</label>
+        </entry>
+        <entry>
+          <value>postgresql</value>
+          <label>POSTGRESQL</label>
+        </entry>
+      </entries>
+    </value-attributes>
+    <description>Type of the Database. Note that sqlite db will be installed on the node where superset is installed.
+      Use mysql or postgresql when installing superset on multiple nodes.
+      mysql installed by ambari is only for development and not suitable for production use cases because it is not highly available (HA).
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_DATABASE_NAME</name>
+    <value>superset</value>
+    <display-name>Superset Database name</display-name>
+    <description>Superset Database name</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_DATABASE_PASSWORD</name>
+    <value></value>
+    <property-type>PASSWORD</property-type>
+    <display-name>Superset Database password</display-name>
+    <description>Password for the database.</description>
+    <value-attributes>
+      <type>password</type>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_DATABASE_USER</name>
+    <value>superset</value>
+    <display-name>Superset Database user</display-name>
+    <description>Superset Database user</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_DATABASE_PORT</name>
+    <value></value>
+    <display-name>Superset Database port</display-name>
+    <description>Superset Database port</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <depends-on>
+      <property>
+        <type>superset</type>
+        <name>SUPERSET_DATABASE_TYPE</name>
+      </property>
+    </depends-on>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_DATABASE_NAME</name>
+    <value>superset</value>
+    <display-name>Superset Database name</display-name>
+    <description>Superset Database name</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SUPERSET_DATABASE_HOSTNAME</name>
+    <value>localhost</value>
+    <display-name>Database hostname</display-name>
+    <description>Database hostname</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>WTF_CSRF_ENABLED</name>
+    <value>True</value>
+    <description></description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>ENABLE_PROXY_FIX</name>
+    <value>True</value>
+    <description>When using Superset behind a Proxy server set this to True.</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>MAPBOX_API_KEY</name>
+    <value></value>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <description>API key to enable Mapbox visualizations</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>SECRET_KEY</name>
+    <value></value>
+    <value-attributes>
+      <type>password</type>
+    </value-attributes>
+    <description>Secret Key used to encrypt user passwords. A Long Random String is recommended.</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/metainfo.xml b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/metainfo.xml
index c837bbb..c0859f2 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/metainfo.xml
@@ -158,10 +158,24 @@
             <scriptType>PYTHON</scriptType>
             <timeout>600</timeout>
           </commandScript>
+          <configuration-dependencies>
+            <config-type>druid-router</config-type>
+          </configuration-dependencies>
+        </component>
+        <component>
+          <name>DRUID_SUPERSET</name>
+          <displayName>Druid Superset</displayName>
+          <category>MASTER</category>
+          <cardinality>1+</cardinality>
+          <commandScript>
+            <script>scripts/superset.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <configuration-dependencies>
+            <config-type>druid-superset</config-type>
+          </configuration-dependencies>
         </component>
-        <configuration-dependencies>
-          <config-type>druid-router</config-type>
-        </configuration-dependencies>
       </components>
 
       <themes>
@@ -178,6 +192,9 @@
             <package>
               <name>druid_${stack_version}</name>
             </package>
+            <package>
+              <name>superset_${stack_version}</name>
+            </package>
           </packages>
         </osSpecific>
         <osSpecific>
@@ -186,6 +203,9 @@
             <package>
               <name>druid-${stack_version}</name>
             </package>
+            <package>
+              <name>superset-${stack_version}</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
index aed4043..61f35b2 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
@@ -149,7 +149,6 @@ HdfsResource = functools.partial(
   dfs_type=dfs_type
 )
 
-
 # Ambari Metrics
 metric_emitter_type = "noop"
 metric_collector_host = ""
@@ -184,3 +183,47 @@ if has_metric_collector:
         metric_collector_protocol = 'http'
     pass
 
+superset_home_dir = format("{stack_root}/current/druid-superset")
+superset_bin_dir = format("{superset_home_dir}/bin")
+superset_log_dir = default("/configurations/druid-superset-env/superset_log_dir", '/var/log/superset')
+superset_pid_dir = status_params.superset_pid_dir
+superset_config_dir = '/etc/superset/conf'
+superset_admin_user = config['configurations']['druid-superset-env']['superset_admin_user']
+superset_admin_password = config['configurations']['druid-superset-env']['superset_admin_password']
+superset_admin_firstname = config['configurations']['druid-superset-env']['superset_admin_firstname']
+superset_admin_lastname = config['configurations']['druid-superset-env']['superset_admin_lastname']
+superset_admin_email = config['configurations']['druid-superset-env']['superset_admin_email']
+superset_env_sh_template = config['configurations']['druid-superset-env']['content']
+superset_protocol = "http"
+superset_webserver_address=config['configurations']['druid-superset']['SUPERSET_WEBSERVER_ADDRESS']
+superset_webserver_port = config['configurations']['druid-superset']['SUPERSET_WEBSERVER_PORT']
+superset_timeout = config['configurations']['druid-superset']['SUPERSET_TIMEOUT']
+superset_workers =  config['configurations']['druid-superset']['SUPERSET_WORKERS']
+superset_hosts = default('/clusterHostInfo/superset_hosts', None)
+
+# superset database configs
+superset_db_type = config['configurations']['druid-superset']['SUPERSET_DATABASE_TYPE']
+superset_db_name = config['configurations']['druid-superset']['SUPERSET_DATABASE_NAME']
+superset_db_password = config['configurations']['druid-superset']['SUPERSET_DATABASE_PASSWORD']
+superset_db_user = config['configurations']['druid-superset']['SUPERSET_DATABASE_USER']
+superset_db_port = config['configurations']['druid-superset']['SUPERSET_DATABASE_PORT']
+superset_db_host = config['configurations']['druid-superset']['SUPERSET_DATABASE_HOSTNAME']
+
+superset_db_uri = None
+if superset_db_type == "sqlite":
+  superset_db_uri = format("sqlite:///{superset_config_dir}/{superset_db_name}.db")
+elif superset_db_type == "postgresql":
+  superset_db_uri = format("postgresql+pygresql://{superset_db_user}:{superset_db_password}@{superset_db_host}:{superset_db_port}/{superset_db_name}")
+elif superset_db_type == "mysql":
+  superset_db_uri = format("mysql+pymysql://{superset_db_user}:{superset_db_password}@{superset_db_host}:{superset_db_port}/{superset_db_name}")
+
+druid_coordinator_hosts = default("/clusterHostInfo/druid_coordinator_hosts", [])
+druid_coordinator_host = ""
+if not len(druid_coordinator_hosts) == 0:
+  druid_coordinator_host = druid_coordinator_hosts[0]
+druid_router_hosts = default("/clusterHostInfo/druid_coordinator_hosts", [])
+druid_router_host = ""
+if not len(druid_router_hosts) == 0:
+  druid_router_host = druid_router_hosts[0]
+druid_coordinator_port = config['configurations']['druid-coordinator']['druid.port']
+druid_router_port = config['configurations']['druid-router']['druid.port']

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/status_params.py
index ee1d61c..d48ff83 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/status_params.py
@@ -22,3 +22,5 @@ from resource_management.libraries.script.script import Script
 config = Script.get_config()
 
 druid_pid_dir = config['configurations']['druid-env']['druid_pid_dir']
+superset_pid_dir = config['configurations']['druid-superset-env']['superset_pid_dir']
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
new file mode 100644
index 0000000..6a56e25
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
@@ -0,0 +1,153 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import os, time
+from resource_management.core.resources.system import Directory
+from resource_management import Script
+from resource_management.libraries.resources.properties_file import PropertiesFile
+from resource_management.core.resources.system import Execute
+from resource_management.core.source import Template
+from resource_management.libraries.functions.show_logs import show_logs
+from resource_management.core.source import InlineTemplate
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.core.resources import File
+
class Superset(Script):
  """Ambari lifecycle handler for the Superset component shipped with DRUID.

  Implements the standard Script commands (install/configure/start/stop/
  status) by delegating to the superset.sh daemon wrapper and the superset
  CLI found under params.superset_bin_dir.
  """

  def get_component_name(self):
    # Stack-select component name used for versioned symlink management.
    return format("druid-superset")

  def install(self, env):
    self.install_packages(env)

  def configure(self, env):
    # Lays down runtime directories and config files, then initializes the
    # Superset metadata DB and registers the Druid cluster.
    # NOTE(review): unlike start()/stop(), this does not call
    # env.set_params(params) before use — confirm intended when CONFIGURE is
    # invoked on its own.
    import params
    # All runtime dirs owned by the druid user; cd_access='a' keeps every
    # path component traversable.
    Directory(
      [params.superset_pid_dir, params.superset_log_dir, params.superset_config_dir, params.superset_home_dir],
      mode=0755,
      cd_access='a',
      owner=params.druid_user,
      group=params.user_group,
      create_parents=True,
      recursive_ownership=True
    )

    # Environment file sourced by every superset CLI invocation below.
    File(format("{params.superset_config_dir}/superset-env.sh"),
         mode=0755,
         owner=params.druid_user,
         group=params.user_group,
         content=InlineTemplate(params.superset_env_sh_template)
         )

    # Daemon wrapper rendered from templates/superset.sh.
    File(os.path.join(params.superset_bin_dir, 'superset.sh'),
         owner=params.druid_user,
         group=params.user_group,
         mode=0755,
         content=Template("superset.sh")
         )
    # Copy into a mutable dict so the DB URI override below can be injected.
    superset_config =  mutable_config_dict(params.config["configurations"]["druid-superset"])

    if params.superset_db_uri:
      superset_config["SQLALCHEMY_DATABASE_URI"] = params.superset_db_uri

    # superset_config.py is plain KEY=value Python; values are quoted by
    # quote_string_values so they parse as literals.
    PropertiesFile("superset_config.py",
                   dir=params.superset_config_dir,
                   properties=quote_string_values(superset_config),
                   owner=params.druid_user,
                   group=params.user_group
                   )

    # Initialize DB and create admin user.
    # NOTE(review): start() re-runs configure(), so these commands execute on
    # every start; verify "fabmanager create-admin" is idempotent when the
    # admin user already exists.
    Execute(format("source {params.superset_config_dir}/superset-env.sh ; {params.superset_bin_dir}/superset db upgrade"),
            user=params.druid_user)
    Execute(format("source {params.superset_config_dir}/superset-env.sh ; {params.superset_bin_dir}/fabmanager create-admin --app superset --username {params.superset_admin_user} --password {params.superset_admin_password} --firstname {params.superset_admin_firstname} --lastname {params.superset_admin_lastname} --email {params.superset_admin_email}"),
            user=params.druid_user)
    Execute(format("source {params.superset_config_dir}/superset-env.sh ; {params.superset_bin_dir}/superset init"),
            user=params.druid_user)

    # Configure Druid Cluster in superset DB
    # (broker-host/port intentionally point at the Druid router per params.py).
    Execute(format("source {params.superset_config_dir}/superset-env.sh ; {params.superset_bin_dir}/superset configure_druid_cluster --name druid-ambari --coordinator-host {params.druid_coordinator_host} --coordinator-port {params.druid_coordinator_port} --broker-host {params.druid_router_host} --broker-port {params.druid_router_port} --coordinator-endpoint druid/coordinator/v1/metadata --broker-endpoint druid/v2"),
            user=params.druid_user)

  def start(self, env):
    # Reconfigures first, then launches the daemon via superset.sh; on any
    # failure the tail of the logs is surfaced before re-raising.
    import params
    env.set_params(params)
    self.configure(env)
    daemon_cmd = self.get_daemon_cmd(params, "start")
    try:
      Execute(daemon_cmd,
              user=params.druid_user
              )
    except:
      show_logs(params.druid_log_dir, params.druid_user)
      raise


  def stop(self, env):
    # Stops the daemon via superset.sh; logs are shown on failure.
    import params
    env.set_params(params)

    daemon_cmd = self.get_daemon_cmd(params, "stop")
    try:
      Execute(daemon_cmd,
              user=params.druid_user
              )
    except:
      show_logs(params.druid_log_dir, params.druid_user)
      raise

  def status(self, env):
    # Checks the pid file written by gunicorn (see superset.sh: -p $pid).
    import status_params
    env.set_params(status_params)
    pid_file = status_params.superset_pid_dir + '/superset.pid'
    check_process_status(pid_file)

  def get_log_folder(self):
    # Superset logs live alongside the Druid logs.
    import params
    return params.druid_log_dir

  def get_user(self):
    import params
    return params.druid_user

  def get_daemon_cmd(self, params=None, command=None):
    # Builds "source <env> ; superset.sh <start|stop|status>".
    return format('source {params.superset_config_dir}/superset-env.sh ; {params.superset_bin_dir}/superset.sh {command}')
+
def mutable_config_dict(config):
  """Return a plain, mutable dict copied from *config*.

  Ambari hands configurations to scripts as a read-only mapping; copying into
  a regular dict lets configure() inject keys (e.g. SQLALCHEMY_DATABASE_URI)
  before rendering superset_config.py.

  :param config: any mapping supporting ``items()``
  :return: shallow dict copy of *config*
  """
  # items() works on both Python 2 and 3 (the original iteritems() is
  # Python-2 only); a comprehension replaces the manual accumulate loop.
  return {key: value for key, value in config.items()}
+
def quote_string_values(config):
  """Return a copy of *config* with every value passed through
  quote_string_value, ready for emission into superset_config.py.

  :param config: mapping of Superset config keys to raw values
  :return: new dict with values normalized to Python-literal strings
  """
  # items() works on both Python 2 and 3 (the original iteritems() is
  # Python-2 only).
  return {key: quote_string_value(value) for key, value in config.items()}
+
def quote_string_value(value):
  """Normalize a config value into text for superset_config.py.

  Boolean and integer literals are emitted bare so Python parses them as
  bool/int; everything else is wrapped in single quotes to form a string
  literal.

  :param value: configuration value; usually str, but may be a non-string
                (e.g. kerberos.json injects KERBEROS_REINIT_TIME_SEC as the
                int 3600 into druid-superset)
  :return: string form of the value
  """
  # BUG FIX: non-string values have no .lower()/.isdigit() and previously
  # raised AttributeError here; emit them bare via str().
  if not hasattr(value, "lower"):
    return str(value)
  if value.lower() == "true" or value.lower() == "false" or value.isdigit():
    return value
  else:
    return "'{}'".format(value)
+
+
+
# Entry point: Ambari invokes this script directly with the command name
# (install/configure/start/stop/status); Script.execute() dispatches to the
# matching Superset method.
if __name__ == "__main__":
  Superset().execute()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/templates/superset.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/templates/superset.sh b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/templates/superset.sh
new file mode 100644
index 0000000..3e327a6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/templates/superset.sh
@@ -0,0 +1,95 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

## Runs superset as a daemon
## Environment Variables used by this script -
## SUPERSET_CONFIG_DIR - directory having druid config files
## SUPERSET_LOG_DIR - directory used to store superset logs
## SUPERSET_PID_DIR - directory used to store pid file

usage="Usage: superset.sh (start|stop|status)"

if [ $# -le 0 ]; then
  echo $usage
  exit 1
fi

command=$1

# ":=" assigns the default into the variable when it is unset, then expands
# it — so the SUPERSET_* names also carry the defaults after this point.
CONF_DIR="${SUPERSET_CONFIG_DIR:=/etc/superset/conf}"
LOG_DIR="${SUPERSET_LOG_DIR:=/var/log/superset}"
PID_DIR="${SUPERSET_PID_DIR:=/var/run/superset}"
TIMEOUT="${SUPERSET_TIMEOUT:=60}"
WEBSERVER_ADDRESS="${SUPERSET_WEBSERVER_ADDRESS:=0.0.0.0}"
WEBSERVER_PORT="${SUPERSET_WEBSERVER_PORT:=9088}"
WORKERS="${SUPERSET_WORKERS:=4}"
BIN_DIR="${SUPERSET_BIN_DIR}"

# Pid file written by gunicorn (-p below) and read by stop/status and by the
# Ambari status command (superset.py).
pid=$PID_DIR/superset.pid

case $command in
  (start)

    # Refuse to start if a live process already owns the pid file.
    if [ -f $pid ]; then
      if kill -0 `cat $pid| head -n 1` > /dev/null 2>&1; then
        echo Superset node running as process `cat $pid | head -n 1`.  Stop it first.
        exit 1
      fi
    fi

    # -D daemonizes gunicorn; -p writes the master pid consumed above/below.
    $BIN_DIR/gunicorn -D --workers $WORKERS -p $pid --log-file $LOG_DIR/superset.log -t $TIMEOUT -b $WEBSERVER_ADDRESS:$WEBSERVER_PORT --limit-request-line 0 --limit-request-field_size 0 superset:app

    echo "Started Superset"
    ;;

  (stop)

    # Signal the gunicorn master if the pid file points at a live process,
    # then remove the (possibly stale) pid file either way.
    if [ -f $pid ]; then
      TARGET_PID=`cat $pid | head -n 1`
      if kill -0 $TARGET_PID > /dev/null 2>&1; then
        echo Stopping process `cat $pid | head -n 1`...
        kill $TARGET_PID
      else
        echo No superset node to stop
      fi
      rm -f $pid
    else
      echo No superset node to stop
    fi
    ;;

   (status)
    # NOTE(review): prints RUNNING/STOPPED but exits 0 in every branch —
    # callers must parse the output, not the exit code. Confirm intended
    # (LSB convention would exit 3 when stopped).
    if [ -f $pid ]; then
      if kill -0 `cat $pid | head -n 1` > /dev/null 2>&1; then
        echo RUNNING
        exit 0
      else
        echo STOPPED
      fi
    else
      echo STOPPED
    fi
    ;;

  (*)
    echo $usage
    exit 1
    ;;
esac
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/quicklinks/quicklinks.json b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/quicklinks/quicklinks.json
index c68b9b9..16f5d5c 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/quicklinks/quicklinks.json
@@ -31,6 +31,19 @@
           "regex": "^(\\d+)$",
           "site": "druid-overlord"
         }
+      },
+      {
+        "name": "superset",
+        "label": "Superset",
+        "component_name": "DRUID_SUPERSET",
+        "requires_user_name": "false",
+        "url": "%@://%@:%@",
+        "port": {
+          "http_property": "SUPERSET_WEBSERVER_PORT",
+          "http_default_port": "9088",
+          "regex": "^(\\d+)$",
+          "site": "druid-superset"
+        }
       }
     ]
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/role_command_order.json b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/role_command_order.json
new file mode 100644
index 0000000..8f65c2e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/role_command_order.json
@@ -0,0 +1,18 @@
+{
+  "general_deps" : {
+    "_comment" : "dependencies for Druid",
+    "DRUID_HISTORICAL-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
+    "DRUID_OVERLORD-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
+    "DRUID_MIDDLEMANAGER-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
+    "DRUID_BROKER-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
+    "DRUID_ROUTER-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
+    "DRUID_COORDINATOR-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
+    "DRUID_SUPERSET-START" : ["DRUID_COORDINATOR-START", "DRUID_BROKER-START"],
+    "DRUID_OVERLORD-RESTART" : ["DRUID_HISTORICAL-RESTART"],
+    "DRUID_MIDDLEMANAGER-RESTART" : ["DRUID_OVERLORD-RESTART"],
+    "DRUID_BROKER-RESTART" : ["DRUID_MIDDLEMANAGER-RESTART"],
+    "DRUID_ROUTER-RESTART" : ["DRUID_BROKER-RESTART"],
+    "DRUID_COORDINATOR-RESTART" : ["DRUID_ROUTER-RESTART"],
+    "DRUID_SERVICE_CHECK-SERVICE_CHECK" : ["DRUID_HISTORICAL-START", "DRUID_COORDINATOR-START", "DRUID_OVERLORD-START", "DRUID_MIDDLEMANAGER-START", "DRUID_BROKER-START", "DRUID_ROUTER-START", "DRUID_SUPERSET-START"]
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/common-services/DRUID/0.9.2/themes/theme.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/themes/theme.json b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/themes/theme.json
index f494594..151478b 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/themes/theme.json
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/themes/theme.json
@@ -10,7 +10,7 @@
             "name": "metadata_storage",
             "display-name": "META DATA STORAGE CONFIG",
             "layout": {
-              "tab-columns": "1",
+              "tab-columns": "2",
               "tab-rows": "1",
               "sections": [
                 {
@@ -19,17 +19,25 @@
                   "row-index": "0",
                   "column-index": "0",
                   "row-span": "2",
-                  "column-span": "1",
+                  "column-span": "2",
                   "section-columns": "1",
                   "section-rows": "1",
                   "subsections": [
                     {
                       "name": "subsection-metadata-storage-row1-col1",
-                      "display-name": "META DATA STORAGE",
+                      "display-name": "DRUID META DATA STORAGE",
                       "row-index": "0",
                       "column-index": "0",
                       "row-span": "1",
                       "column-span": "1"
+                    },
+                    {
+                      "name": "subsection-metadata-storage-row1-col2",
+                      "display-name": "SUPERSET META DATA STORAGE",
+                      "row-index": "0",
+                      "column-index": "1",
+                      "row-span": "1",
+                      "column-span": "1"
                     }
                   ]
                 }
@@ -69,6 +77,34 @@
         {
           "config": "druid-common/druid.metadata.storage.connector.connectURI",
           "subsection-name": "subsection-metadata-storage-row1-col1"
+        },
+        {
+          "config": "druid-superset/SUPERSET_DATABASE_NAME",
+          "subsection-name": "subsection-metadata-storage-row1-col2"
+        },
+        {
+          "config": "druid-superset/SUPERSET_DATABASE_TYPE",
+          "subsection-name": "subsection-metadata-storage-row1-col2"
+        },
+        {
+          "config": "druid-superset/SUPERSET_DATABASE_USER",
+          "subsection-name": "subsection-metadata-storage-row1-col2"
+        },
+        {
+          "config": "druid-superset/SUPERSET_DATABASE_PASSWORD",
+          "subsection-name": "subsection-metadata-storage-row1-col2"
+        },
+        {
+          "config": "druid-superset/SUPERSET_DATABASE_HOSTNAME",
+          "subsection-name": "subsection-metadata-storage-row1-col2"
+        },
+        {
+          "config": "druid-superset/SUPERSET_DATABASE_PORT",
+          "subsection-name": "subsection-metadata-storage-row1-col2"
+        },
+        {
+          "config": "druid-superset/SECRET_KEY",
+          "subsection-name": "subsection-metadata-storage-row1-col2"
         }
       ]
     },
@@ -114,6 +150,48 @@
         "widget": {
           "type": "text-field"
         }
+      },
+      {
+        "config": "druid-superset/SUPERSET_DATABASE_NAME",
+        "widget": {
+          "type": "text-field"
+        }
+      },
+      {
+        "config": "druid-superset/SUPERSET_DATABASE_TYPE",
+        "widget": {
+          "type": "combo"
+        }
+      },
+      {
+        "config": "druid-superset/SUPERSET_DATABASE_USER",
+        "widget": {
+          "type": "text-field"
+        }
+      },
+      {
+        "config": "druid-superset/SUPERSET_DATABASE_PASSWORD",
+        "widget": {
+          "type": "password"
+        }
+      },
+      {
+        "config": "druid-superset/SUPERSET_DATABASE_HOSTNAME",
+        "widget": {
+          "type": "text-field"
+        }
+      },
+      {
+        "config": "druid-superset/SUPERSET_DATABASE_PORT",
+        "widget": {
+          "type": "text-field"
+        }
+      },
+      {
+        "config": "druid-superset/SECRET_KEY",
+        "widget": {
+          "type": "password"
+        }
       }
     ]
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/stacks/HDP/2.6/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.6/role_command_order.json
deleted file mode 100644
index 66b6ed2..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/role_command_order.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "_comment" : "Record format:",
-  "_comment" : "blockedRole-blockedCommand: [blockerRole1-blockerCommand1, blockerRole2-blockerCommand2, ...]",
-  "general_deps" : {
-    "_comment" : "dependencies for all cases",
-    "DRUID_HISTORICAL-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
-    "DRUID_OVERLORD-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
-    "DRUID_MIDDLEMANAGER-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
-    "DRUID_BROKER-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
-    "DRUID_ROUTER-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
-    "DRUID_COORDINATOR-START" : ["ZOOKEEPER_SERVER-START", "NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START"],
-    "DRUID_OVERLORD-RESTART" : ["DRUID_HISTORICAL-RESTART"],
-    "DRUID_MIDDLEMANAGER-RESTART" : ["DRUID_OVERLORD-RESTART"],
-    "DRUID_BROKER-RESTART" : ["DRUID_MIDDLEMANAGER-RESTART"],
-    "DRUID_ROUTER-RESTART" : ["DRUID_BROKER-RESTART"],
-    "DRUID_COORDINATOR-RESTART" : ["DRUID_ROUTER-RESTART"],
-    "DRUID_SERVICE_CHECK-SERVICE_CHECK" : ["DRUID_HISTORICAL-START", "DRUID_COORDINATOR-START", "DRUID_OVERLORD-START", "DRUID_MIDDLEMANAGER-START", "DRUID_BROKER-START", "DRUID_ROUTER-START"]
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
index 251975b..0ba5071 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
@@ -34,6 +34,27 @@
           }
         },
         {
+          "name": "superset",
+          "principal": {
+            "value": "${druid-env/druid_user}@${realm}",
+            "type": "user",
+            "configuration": "druid-superset/KERBEROS_PRINCIPAL",
+            "local_username": "${druid-env/druid_user}"
+          },
+          "keytab": {
+            "file": "${keytab_dir}/superset.headless.keytab",
+            "owner": {
+              "name": "${druid-env/druid_user}",
+              "access": "r"
+            },
+            "group": {
+              "name": "${cluster-env/user_group}",
+              "access": "r"
+            },
+            "configuration": "druid-superset/KERBEROS_KEYTAB"
+          }
+        },
+        {
           "name": "/smokeuser"
         }
       ],
@@ -77,6 +98,14 @@
               "name": "/druid"
             }
           ]
+        },
+        {
+          "name": "DRUID_SUPERSET",
+          "identities": [
+            {
+              "name": "/druid"
+            }
+          ]
         }
       ],
       "configurations": [
@@ -84,6 +113,10 @@
           "druid-common": {
             "druid.hadoop.security.spnego.excludedPaths": "[\"/status\"]",
             "druid.security.extensions.loadList" : "[\"druid-kerberos\"]"
+          },
+          "druid-superset": {
+            "ENABLE_KERBEROS_AUTHENTICATION" : "True",
+            "KERBEROS_REINIT_TIME_SEC" : 3600
           }
         }
       ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
index 957d625..e6ebd6f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
@@ -127,6 +127,17 @@ class HDP26StackAdvisor(HDP25StackAdvisor):
               putComponentProperty('druid.processing.numThreads', processingThreads)
               putComponentProperty('druid.server.http.numThreads', max(10, (totalAvailableCpu * 17) / 16 + 2) + 30)
 
+      # superset is in list of services to be installed
+      if 'druid-superset' in services['configurations']:
+        # Recommendations for Superset
+        superset_database_type = services['configurations']["druid-superset"]["properties"]["SUPERSET_DATABASE_TYPE"]
+        putSupersetProperty = self.putProperty(configurations, "druid-superset", services)
+
+        if superset_database_type == "mysql":
+            putSupersetProperty("SUPERSET_DATABASE_PORT", "3306")
+        elif superset_database_type == "postgresql":
+            putSupersetProperty("SUPERSET_DATABASE_PORT", "5432")
+
   def getMetadataConnectionString(self, database_type):
       driverDict = {
           'mysql': 'jdbc:mysql://{0}:{2}/{1}?createDatabaseIfNotExist=true',

http://git-wip-us.apache.org/repos/asf/ambari/blob/3c0f3c46/ambari-server/src/test/python/stacks/2.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/configs/default.json b/ambari-server/src/test/python/stacks/2.6/configs/default.json
index 4d9f98c..a1f89ce 100644
--- a/ambari-server/src/test/python/stacks/2.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.6/configs/default.json
@@ -459,6 +459,9 @@
     },
     "druid-logrotate" : {
       "content" : "<![CDATA[\n    {{druid_log_dir}}/*.log {\n        copytruncate\n        rotate 7\n        daily\n        nocompress\n        missingok\n        notifempty\n        create 660 druid users\n        dateext\n        dateformat -%Y-%m-%d-%s\n        }\n      ]]>\n"
+    },
+    "druid-superset" : {
+      "SUPERSET_DATABASE_TYPE" : "sqllite"
     }
   },
   "configuration_attributes": {


[42/50] [abbrv] ambari git commit: AMBARI-19919: spark/livy (1.x) should not be configured in Zeppelin's interpreter if they are not installed (Prabhjyot Singh via r-kamath)

Posted by nc...@apache.org.
AMBARI-19919: spark/livy (1.x) should not be configured in Zeppelin's interpreter if they are not installed (Prabhjyot Singh via r-kamath)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a9060614
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a9060614
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a9060614

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: a9060614ffe9cd6812eb8ff9cbb6fd81572f66e2
Parents: b695bf2
Author: Renjith Kamath <re...@gmail.com>
Authored: Mon Feb 13 13:27:06 2017 +0530
Committer: Renjith Kamath <re...@gmail.com>
Committed: Mon Feb 13 13:29:00 2017 +0530

----------------------------------------------------------------------
 .../package/scripts/livy2_config_template.py    | 107 +++++++++++++++++++
 .../0.6.0.2.5/package/scripts/master.py         |  42 ++++++--
 .../0.6.0.2.5/package/scripts/params.py         |   8 ++
 3 files changed, 147 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a9060614/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
new file mode 100644
index 0000000..71d3817
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+template = '''
+{
+  "id": "2C8A4SZ9T_livy2",
+  "status": "READY",
+  "group": "livy",
+  "name": "livy2",
+  "properties": {
+    "zeppelin.livy.keytab": "",
+    "zeppelin.livy.spark.sql.maxResult": "1000",
+    "livy.spark.executor.instances": "",
+    "livy.spark.executor.memory": "",
+    "livy.spark.dynamicAllocation.enabled": "",
+    "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "",
+    "livy.spark.dynamicAllocation.initialExecutors": "",
+    "zeppelin.livy.session.create_timeout": "120",
+    "livy.spark.driver.memory": "",
+    "zeppelin.livy.displayAppInfo": "false",
+    "livy.spark.jars.packages": "",
+    "livy.spark.dynamicAllocation.maxExecutors": "",
+    "zeppelin.livy.concurrentSQL": "false",
+    "zeppelin.livy.principal": "",
+    "livy.spark.executor.cores": "",
+    "zeppelin.livy.url": "http://localhost:8998",
+    "zeppelin.livy.pull_status.interval.millis": "1000",
+    "livy.spark.driver.cores": "",
+    "livy.spark.dynamicAllocation.minExecutors": ""
+  },
+  "interpreterGroup": [
+    {
+      "class": "org.apache.zeppelin.livy.LivySparkInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "scala"
+      },
+      "name": "spark",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "sql"
+      },
+      "name": "sql",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "python"
+      },
+      "name": "pyspark",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "python"
+      },
+      "name": "pyspark3",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivySparkRInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "r"
+      },
+      "name": "sparkr",
+      "defaultInterpreter": false
+    }
+  ],
+  "dependencies": [],
+  "option": {
+    "setPermission": false,
+    "remote": true,
+    "users": [],
+    "isExistingProcess": false,
+    "perUser": "scoped",
+    "isUserImpersonate": false,
+    "perNote": "shared",
+    "port": -1
+  }
+}
+'''

http://git-wip-us.apache.org/repos/asf/ambari/blob/a9060614/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
index 9af48e5..8a1fad6 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
@@ -289,13 +289,18 @@ class Master(Script):
     config_data = self.get_interpreter_settings()
     interpreter_settings = config_data['interpreterSettings']
 
-    if params.spark2_home:
+    if 'spark2-env' in params.config['configurations']:
       spark2_config = self.get_spark2_interpreter_config()
       config_id = spark2_config["id"]
       interpreter_settings[config_id] = spark2_config
 
-    for interpreter_setting in interpreter_settings:
-      interpreter = interpreter_settings[interpreter_setting]
+    if 'livy2-env' in params.config['configurations']:
+      livy2_config = self.get_livy2_interpreter_config()
+      config_id = livy2_config["id"]
+      interpreter_settings[config_id] = livy2_config
+
+    for setting_key in interpreter_settings.keys():
+      interpreter = interpreter_settings[setting_key]
       if interpreter['group'] == 'jdbc':
         interpreter['dependencies'] = []
         if params.hive_server_host:
@@ -328,23 +333,34 @@ class Master(Script):
                                                     params.zookeeper_znode_parent
             interpreter['dependencies'].append(
                 {"groupArtifactVersion": "org.apache.phoenix:phoenix-core:4.7.0-HBase-1.1", "local": "false"})
-      elif interpreter['group'] == 'livy' and params.livy_livyserver_host:
-        interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy_livyserver_host +\
-                                                      ":" + params.livy_livyserver_port
+      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy':
+        if params.livy_livyserver_host:
+          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy_livyserver_host + \
+                                                           ":" + params.livy_livyserver_port
+        else:
+          del interpreter_settings[setting_key]
+
+      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy2':
+        if params.livy2_livyserver_host:
+          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy2_livyserver_host + \
+                                                           ":" + params.livy2_livyserver_port
+        else:
+          del interpreter_settings[setting_key]
+
 
       elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark':
-        if params.spark_home:
+        if 'spark-env' in params.config['configurations']:
           interpreter['properties']['master'] = "yarn-client"
           interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark-client/"
         else:
-          interpreter['properties']['master'] = "local[*]"
+          del interpreter_settings[setting_key]
 
       elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark2':
-        if params.spark2_home:
+        if 'spark2-env' in params.config['configurations']:
           interpreter['properties']['master'] = "yarn-client"
           interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark2-client/"
         else:
-          interpreter['properties']['master'] = "local[*]"
+          del interpreter_settings[setting_key]
 
     self.set_interpreter_settings(config_data)
 
@@ -367,5 +383,11 @@ class Master(Script):
 
     return json.loads(spark2_config_template.template)
 
+  def get_livy2_interpreter_config(self):
+    import livy2_config_template
+    import json
+
+    return json.loads(livy2_config_template.template)
+
 if __name__ == "__main__":
   Master().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/a9060614/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
index 97e93fe..16a2782 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
@@ -161,14 +161,22 @@ spark_client_version = get_stack_version('spark-client')
 
 hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
 livy_hosts = default("/clusterHostInfo/livy_server_hosts", [])
+livy2_hosts = default("/clusterHostInfo/livy2_server_hosts", [])
 
 livy_livyserver_host = None
 livy_livyserver_port = None
+livy2_livyserver_host = None
+livy2_livyserver_port = None
 if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted) and \
     len(livy_hosts) > 0:
   livy_livyserver_host = str(livy_hosts[0])
   livy_livyserver_port = config['configurations']['livy-conf']['livy.server.port']
 
+if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, stack_version_formatted) and \
+    len(livy2_hosts) > 0:
+  livy2_livyserver_host = str(livy2_hosts[0])
+  livy2_livyserver_port = config['configurations']['livy2-conf']['livy.server.port']
+
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']


[44/50] [abbrv] ambari git commit: AMBARI-19058. Perf: Deploy 3000 Agent cluster and find perf bugs. Part5.(vbrodetskyi)

Posted by nc...@apache.org.
AMBARI-19058. Perf: Deploy 3000 Agent cluster and find perf bugs. Part5.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a7eafd59
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a7eafd59
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a7eafd59

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: a7eafd59db2f9771b9e2b08d6a71406e05309b5c
Parents: b8176ff
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Feb 13 14:10:21 2017 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Feb 13 14:10:21 2017 +0200

----------------------------------------------------------------------
 .../resources/stacks/PERF/1.0/services/FAKEHBASE/metainfo.xml   | 2 +-
 contrib/utils/perf/deploy-gce-perf-cluster.py                   | 5 ++++-
 2 files changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a7eafd59/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/metainfo.xml
index 4dbd2bb..059deff 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/metainfo.xml
@@ -41,7 +41,7 @@
               </auto-deploy>
             </dependency>
             <dependency>
-              <name>FAKEZOOKEEPER/FAKEZOOKEEPER_SERVER</name>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
               <scope>cluster</scope>
               <auto-deploy>
                 <enabled>true</enabled>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a7eafd59/contrib/utils/perf/deploy-gce-perf-cluster.py
----------------------------------------------------------------------
diff --git a/contrib/utils/perf/deploy-gce-perf-cluster.py b/contrib/utils/perf/deploy-gce-perf-cluster.py
index 6364122..7431ae9 100644
--- a/contrib/utils/perf/deploy-gce-perf-cluster.py
+++ b/contrib/utils/perf/deploy-gce-perf-cluster.py
@@ -338,7 +338,10 @@ def create_server_script(server_host_name):
   "sed -i -f /home/ambari/ambari-server/src/main/resources/stacks/PERF/install_packages.sed /var/lib/ambari-agent/cache/custom_actions/scripts/install_packages.py\n" + \
   "\n" + \
   "\n" + \
-  "yum install mysql-connector-java* -y\n" + \
+  "cd /; wget http://central.maven.org/maven2/mysql/mysql-connector-java/5.1.40/mysql-connector-java-5.1.40.jar;\n" + \
+  "mkdir /usr/share/java; chmod 777 /usr/share/java;" + \
+  "cp mysql-connector-java-5.1.40.jar /usr/share/java/; chmod 777 /usr/share/java/mysql-connector-java-5.1.40.jar;\n" + \
+  "ln -s /usr/share/java/mysql-connector-java-5.1.40.jar /usr/share/java/mysql-connector-java.jar;\n" + \
   "cd /etc/yum.repos.d/; wget http://repo.mysql.com/mysql-community-release-el6-5.noarch.rpm; rpm -ivh mysql-community-release-el6-5.noarch.rpm;" + \
   "yum clean all; yum install mysql-server -y\n" + \
   "sed -i -e 's/mysqld]/mysqld]\\nmax_allowed_packet=1024M\\njoin_buffer_size=512M\\nsort_buffer_size=128M\\nread_rnd_buffer_size=128M\\ninnodb_buffer_pool_size=16G" \


[47/50] [abbrv] ambari git commit: AMBARI-19967 - Ambari Server Unit Test Failures (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-19967 - Ambari Server Unit Test Failures (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fa32fec6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fa32fec6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fa32fec6

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: fa32fec6d891745885b976cc2dc13512706dfed9
Parents: 6c4cbc4
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Feb 10 12:09:38 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Feb 13 11:23:33 2017 -0500

----------------------------------------------------------------------
 .../server/orm/dao/AlertDefinitionDAO.java      |  7 ++-
 .../ambari/server/orm/dao/AlertDispatchDAO.java | 45 ++++++++------------
 .../server/orm/entities/AlertGroupEntity.java   |  9 ++--
 .../server/orm/dao/AlertDispatchDAOTest.java    | 20 ++++-----
 .../state/cluster/AlertDataManagerTest.java     |  5 ++-
 5 files changed, 36 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fa32fec6/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
index 703ff58..c3e3a9f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
@@ -321,16 +321,15 @@ public class AlertDefinitionDAO {
     EntityManager entityManager = entityManagerProvider.get();
     entityManager.persist(alertDefinition);
 
-    AlertGroupEntity group = dispatchDao.findDefaultServiceGroup(
-        alertDefinition.getClusterId(), alertDefinition.getServiceName());
+    AlertGroupEntity group = dispatchDao.findDefaultServiceGroup(alertDefinition.getClusterId(),
+        alertDefinition.getServiceName());
 
     if (null == group) {
       // create the default alert group for the new service; this MUST be done
       // before adding definitions so that they are properly added to the
       // default group
       String serviceName = alertDefinition.getServiceName();
-      group = dispatchDao.createDefaultGroup(alertDefinition.getClusterId(),
-          serviceName);
+      group = dispatchDao.createDefaultGroup(alertDefinition.getClusterId(), serviceName);
     }
 
     group.addAlertDefinition(alertDefinition);

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa32fec6/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDispatchDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDispatchDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDispatchDAO.java
index 3b9c97a..5bd84ad 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDispatchDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDispatchDAO.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
 
 import javax.persistence.EntityManager;
 import javax.persistence.TypedQuery;
@@ -52,6 +51,7 @@ import org.eclipse.persistence.config.QueryHints;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.util.concurrent.Striped;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.google.inject.Singleton;
@@ -83,10 +83,11 @@ public class AlertDispatchDAO {
   private Provider<Clusters> m_clusters;
 
   /**
-   * A lock that ensures that group writes are protected. This is useful since
-   * groups can be created through different events/threads in the system.
+   * Used for ensuring that the concurrent nature of the event handler methods
+   * don't collide when attempting to creation alert groups for the same
+   * service.
    */
-  private final Lock m_groupLock = new ReentrantLock();
+  private Striped<Lock> m_locksByService = Striped.lazyWeakLock(20);
 
   private static final Logger LOG = LoggerFactory.getLogger(AlertDispatchDAO.class);
 
@@ -195,24 +196,6 @@ public class AlertDispatchDAO {
   }
 
   /**
-   * Gets an alert group with the specified name across all clusters. Alert
-   * group names are unique within a cluster.
-   *
-   * @param groupName
-   *          the name of the group (not {@code null}).
-   * @return the alert group or {@code null} if none exists.
-   */
-  @RequiresSession
-  public AlertGroupEntity findGroupByName(String groupName) {
-    TypedQuery<AlertGroupEntity> query = entityManagerProvider.get().createNamedQuery(
-        "AlertGroupEntity.findByName", AlertGroupEntity.class);
-
-    query.setParameter("groupName", groupName);
-
-    return daoUtils.selectSingle(query);
-  }
-
-  /**
    * Gets an alert group with the specified name for the given cluster. Alert
    * group names are unique within a cluster.
    *
@@ -390,7 +373,7 @@ public class AlertDispatchDAO {
     }
 
     // sorting
-    JpaSortBuilder<AlertNoticeEntity> sortBuilder = new JpaSortBuilder<AlertNoticeEntity>();
+    JpaSortBuilder<AlertNoticeEntity> sortBuilder = new JpaSortBuilder<>();
     List<Order> sortOrders = sortBuilder.buildSortOrders(request.Sort, visitor);
     query.orderBy(sortOrders);
 
@@ -466,6 +449,7 @@ public class AlertDispatchDAO {
   @Transactional
   public AlertGroupEntity createDefaultGroup(long clusterId, String serviceName)
       throws AmbariException {
+
     // AMBARI is a special service that we let through, otherwise we need to
     // verify that the service exists before we create the default group
     String ambariServiceName = Services.AMBARI.name();
@@ -481,21 +465,26 @@ public class AlertDispatchDAO {
       }
     }
 
-    AlertGroupEntity group = new AlertGroupEntity();
+    Lock lock = m_locksByService.get(serviceName);
+    lock.lock();
 
-    m_groupLock.lock();
     try {
+      AlertGroupEntity group = findDefaultServiceGroup(clusterId, serviceName);
+      if (null != group) {
+        return group;
+      }
+
+      group = new AlertGroupEntity();
       group.setClusterId(clusterId);
       group.setDefault(true);
       group.setGroupName(serviceName);
       group.setServiceName(serviceName);
 
       create(group);
+      return group;
     } finally {
-      m_groupLock.unlock();
+      lock.unlock();
     }
-
-    return group;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa32fec6/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
index b660631..7ca26e6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
@@ -62,9 +62,6 @@ import javax.persistence.UniqueConstraint;
         name = "AlertGroupEntity.findAllInCluster",
         query = "SELECT alertGroup FROM AlertGroupEntity alertGroup WHERE alertGroup.clusterId = :clusterId"),
     @NamedQuery(
-        name = "AlertGroupEntity.findByName",
-        query = "SELECT alertGroup FROM AlertGroupEntity alertGroup WHERE alertGroup.groupName = :groupName"),
-    @NamedQuery(
         name = "AlertGroupEntity.findByNameInCluster",
         query = "SELECT alertGroup FROM AlertGroupEntity alertGroup WHERE alertGroup.groupName = :groupName AND alertGroup.clusterId = :clusterId"),
     @NamedQuery(
@@ -226,7 +223,7 @@ public class AlertGroupEntity {
    */
   public Set<AlertDefinitionEntity> getAlertDefinitions() {
     if (null == alertDefinitions) {
-      alertDefinitions = new HashSet<AlertDefinitionEntity>();
+      alertDefinitions = new HashSet<>();
     }
 
     return Collections.unmodifiableSet(alertDefinitions);
@@ -263,7 +260,7 @@ public class AlertGroupEntity {
    */
   public void addAlertDefinition(AlertDefinitionEntity definition) {
     if (null == alertDefinitions) {
-      alertDefinitions = new HashSet<AlertDefinitionEntity>();
+      alertDefinitions = new HashSet<>();
     }
 
     alertDefinitions.add(definition);
@@ -308,7 +305,7 @@ public class AlertGroupEntity {
    */
   public void addAlertTarget(AlertTargetEntity alertTarget) {
     if (null == alertTargets) {
-      alertTargets = new HashSet<AlertTargetEntity>();
+      alertTargets = new HashSet<>();
     }
 
     alertTargets.add(alertTarget);

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa32fec6/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java
index 1ec6d40..0bdd5b2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java
@@ -158,7 +158,7 @@ public class AlertDispatchDAOTest {
     assertEquals(1, targets.size());
 
     // find by ids
-    List<Long> ids = new ArrayList<Long>();
+    List<Long> ids = new ArrayList<>();
     ids.add(targets.get(0).getTargetId());
     ids.add(99999L);
 
@@ -213,7 +213,7 @@ public class AlertDispatchDAOTest {
     assertEquals(group, actual);
 
     //find by id
-    List<Long> ids = new ArrayList<Long>();
+    List<Long> ids = new ArrayList<>();
     ids.add(groups.get(0).getGroupId());
     ids.add(groups.get(1).getGroupId());
     ids.add(99999L);
@@ -243,7 +243,7 @@ public class AlertDispatchDAOTest {
   public void testCreateUpdateRemoveGroup() throws Exception {
     // create group
     AlertTargetEntity target = m_helper.createAlertTarget();
-    Set<AlertTargetEntity> targets = new HashSet<AlertTargetEntity>();
+    Set<AlertTargetEntity> targets = new HashSet<>();
     targets.add(target);
 
     AlertGroupEntity group = m_helper.createAlertGroup(
@@ -299,7 +299,7 @@ public class AlertDispatchDAOTest {
     int targetCount = m_dao.findAllTargets().size();
 
     AlertTargetEntity target = m_helper.createAlertTarget();
-    Set<AlertTargetEntity> targets = new HashSet<AlertTargetEntity>();
+    Set<AlertTargetEntity> targets = new HashSet<>();
     targets.add(target);
 
     AlertGroupEntity group = m_helper.createAlertGroup(
@@ -430,7 +430,7 @@ public class AlertDispatchDAOTest {
   @Test
   public void testDeleteAssociatedTarget() throws Exception {
     AlertTargetEntity target = m_helper.createAlertTarget();
-    Set<AlertTargetEntity> targets = new HashSet<AlertTargetEntity>();
+    Set<AlertTargetEntity> targets = new HashSet<>();
     targets.add(target);
 
     AlertGroupEntity group = m_helper.createAlertGroup(
@@ -473,7 +473,7 @@ public class AlertDispatchDAOTest {
 
     m_dao.merge(group);
 
-    group = m_dao.findGroupByName(group.getGroupName());
+    group = m_dao.findGroupByName(m_cluster.getClusterId(), group.getGroupName());
     assertEquals(definitions.size(), group.getAlertDefinitions().size());
 
     // assert that the definition is now part of 2 groups (the default group
@@ -690,7 +690,7 @@ public class AlertDispatchDAOTest {
 
     m_alertHelper.populateData(m_cluster);
 
-    List<SortRequestProperty> sortProperties = new ArrayList<SortRequestProperty>();
+    List<SortRequestProperty> sortProperties = new ArrayList<>();
     SortRequest sortRequest = new SortRequestImpl(sortProperties);
 
     AlertNoticeRequest request = new AlertNoticeRequest();
@@ -850,7 +850,7 @@ public class AlertDispatchDAOTest {
    * @throws Exception
    */
   private Set<AlertTargetEntity> createTargets(int numberOfTargets) throws Exception {
-    Set<AlertTargetEntity> targets = new HashSet<AlertTargetEntity>();
+    Set<AlertTargetEntity> targets = new HashSet<>();
     for (int i = 0; i < numberOfTargets; i++) {
       AlertTargetEntity target = new AlertTargetEntity();
       target.setDescription("Target Description " + i);
@@ -883,7 +883,7 @@ public class AlertDispatchDAOTest {
 
     m_dao.merge(group);
 
-    group = m_dao.findGroupByName(group.getGroupName());
+    group = m_dao.findGroupByName(m_cluster.getClusterId(), group.getGroupName());
     assertEquals(definitions.size(), group.getAlertDefinitions().size());
 
     for (AlertDefinitionEntity definition : definitions) {
@@ -894,7 +894,7 @@ public class AlertDispatchDAOTest {
     m_definitionDao.remove(definitions.get(0));
     definitions.remove(0);
 
-    group = m_dao.findGroupByName(group.getGroupName());
+    group = m_dao.findGroupByName(m_cluster.getClusterId(), group.getGroupName());
     assertEquals(definitions.size(), group.getAlertDefinitions().size());
 
     for (AlertDefinitionEntity definition : definitions) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa32fec6/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java
index 4ad93e6..1e74658 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java
@@ -113,6 +113,7 @@ public class AlertDataManagerTest {
   public void setup() throws Exception {
     m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
     EventBusSynchronizer.synchronizeAlertEventPublisher(m_injector);
+    EventBusSynchronizer.synchronizeAmbariEventPublisher(m_injector);
     m_injector.getInstance(GuiceJpaInitializer.class);
     m_injector.getInstance(UnitOfWork.class).begin();
 
@@ -311,7 +312,7 @@ public class AlertDataManagerTest {
     m_dao.create(currentAlert);
 
     AlertTargetEntity target = m_helper.createAlertTarget();
-    Set<AlertTargetEntity> targets = new HashSet<AlertTargetEntity>();
+    Set<AlertTargetEntity> targets = new HashSet<>();
     targets.add(target);
 
     AlertGroupEntity group = m_helper.createAlertGroup(
@@ -419,7 +420,7 @@ public class AlertDataManagerTest {
     AlertEventPublisher publisher = m_injector.getInstance(AlertEventPublisher.class);
     EventBusSynchronizer.synchronizeAlertEventPublisher(m_injector);
 
-    final AtomicReference<Alert> ref = new AtomicReference<Alert>();
+    final AtomicReference<Alert> ref = new AtomicReference<>();
     publisher.register(new TestListener() {
       @Override
       @Subscribe


[16/50] [abbrv] ambari git commit: AMBARI-19956. Workflow Manager Flow Graph should show line transition to decision path and error node (Madhan Mohan Reddy via gauravn7).

Posted by nc...@apache.org.
AMBARI-19956. Workflow Manager Flow Graph should show line transition to decision path and error node (Madhan Mohan Reddy via gauravn7).


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b15b6064
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b15b6064
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b15b6064

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b15b60646c3d8a0b39579718f48e0076c26a6142
Parents: 1339d52
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Fri Feb 10 16:50:55 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Fri Feb 10 16:50:55 2017 +0530

----------------------------------------------------------------------
 .../resources/ui/app/components/job-details.js  | 43 ++++++++++++++++----
 1 file changed, 34 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b15b6064/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
index 6507c49..d1343b0 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
@@ -192,7 +192,7 @@ export default Ember.Component.extend({
     var dataNodes = [];
     var self=this;
     workflow.nodeVisitor.process(workflow.startNode, function(node) {
-      if (node.type === 'kill') {
+      if (node.type === 'kill' && !(node.forceRenderNode || self.getActionNode(node.name, node.type))) {
         return;
       }
       var nodeActionStatus = self.getActionStatus(node.name, node.type);
@@ -206,18 +206,28 @@ export default Ember.Component.extend({
       });
         if (node.transitions.length > 0) {
           node.transitions.forEach(function(tran){
-            if (tran.targetNode.type === 'kill') {
-              return;
-            }
             var transitionBorderColor;
             var actionNode = self.getActionNode(node.name, node.type);
-            if (actionNode && (actionNode.transition===tran.targetNode.name ||actionNode.transition==='*')){
-              transitionBorderColor = Constants.successfulFlowColor;//green
+            if (tran.targetNode.type === 'kill' &&
+              !((actionNode && actionNode.transition===tran.targetNode.name) || (node.isPlaceholder()))) {
+              return;
+            }
+             if (tran.getTargetNode(true).isKillNode()  && !tran.isOnError()){
+              tran.targetNode.forceRenderNode = true;
+             }
+            if (actionNode && (actionNode.transition===tran.targetNode.name ||actionNode.transition==='*' || (tran.targetNode.isPlaceholder() && actionNode.transition===tran.getTargetNode(true).name))) {
+              transitionBorderColor = Constants.successfulFlowColor;
+              if (tran.targetNode.isPlaceholder()) {
+                tran.targetNode.successfulFlow = true;
+              }
             }else{
-              transitionBorderColor = Constants.defaultFlowColor;//grey
+              transitionBorderColor = Constants.defaultFlowColor;
             }
             if (!actionNode){
-              transitionBorderColor = Constants.defaultFlowColor;//grey
+              transitionBorderColor = Constants.defaultFlowColor;
+              if (node.isPlaceholder() && node.successfulFlow) {
+                transitionBorderColor = Constants.successfulFlowColor;
+              }
             }
             dataNodes.push(
               {
@@ -225,6 +235,7 @@ export default Ember.Component.extend({
                   id: tran.sourceNodeId + '_to_' + tran.targetNode.id,
                   source:tran.sourceNodeId,
                   target: tran.targetNode.id,
+                  transition: tran,
                   borderColor: transitionBorderColor
                 }
               }
@@ -268,6 +279,14 @@ export default Ember.Component.extend({
             }
           },
           {
+            selector: 'node[type = "placeholder"]',
+            style: {
+              width: 1,
+              height: 1,
+              label: ''
+            }
+          },
+          {
             selector: 'node[shape = "roundrectangle"]',
             style: {
               width: 100,
@@ -294,7 +313,13 @@ export default Ember.Component.extend({
               width: 1,
               'line-color': 'data(borderColor)',
               'curve-style': 'bezier',
-      				'target-arrow-shape': 'triangle',
+              'target-arrow-shape': function(target){
+                if (target.data().transition && target.data().transition.getTargetNode(false) && !target.data().transition.getTargetNode(false).isPlaceholder()) {
+                  return "triangle";
+                }else{
+                  return "none";
+                }
+              },
               'target-arrow-color': 'data(borderColor)'
             }
           }


[07/50] [abbrv] ambari git commit: AMBARI-19928. Solr grafana dashboards. (Willy Solaligue via yusaku)

Posted by nc...@apache.org.
AMBARI-19928. Solr grafana dashboards. (Willy Solaligue via yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/00ed4159
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/00ed4159
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/00ed4159

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 00ed41594fdd03c7da715904efec522748c1e3bd
Parents: 05ce603
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Thu Feb 9 16:49:03 2017 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Feb 9 16:49:03 2017 -0800

----------------------------------------------------------------------
 .../HDP/grafana-solr-cores-dashboard.json       | 3162 ++++++++++++++++++
 .../HDP/grafana-solr-hosts-dashboard.json       |  538 +++
 2 files changed, 3700 insertions(+)
----------------------------------------------------------------------



[35/50] [abbrv] ambari git commit: AMBARI-19977. Increase default timeout for RM (aonishuk)

Posted by nc...@apache.org.
AMBARI-19977. Increase default timeout for RM (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6098d342
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6098d342
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6098d342

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 6098d342c14ede52f2743274482ce3d37c0917b6
Parents: 77b5b16
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Sat Feb 11 18:16:48 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Sat Feb 11 18:16:48 2017 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml     | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6098d342/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
index fcd9380..0eb3366 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
@@ -127,13 +127,13 @@
   </property>
   <property>
     <name>yarn.resourcemanager.connect.retry-interval.ms</name>
-    <value>30000</value>
+    <value>15000</value>
     <description>How often to try connecting to the ResourceManager.</description>
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
     <name>yarn.resourcemanager.connect.max-wait.ms</name>
-    <value>900000</value>
+    <value>-1</value>
     <description>Maximum time to wait to establish connection to ResourceManager</description>
     <on-ambari-upgrade add="true"/>
   </property>


[50/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-feature-AMBARI-12556

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-12556


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/341cb124
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/341cb124
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/341cb124

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 341cb1247cbf67f5611766d8f5bf27bd878441e1
Parents: d222f57 7cb9a6a
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 13 17:11:38 2017 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 13 17:11:38 2017 -0500

----------------------------------------------------------------------
 .../controllers/clusters/UserAccessListCtrl.js  |    5 +-
 .../clusters/UserAccessListCtrl_test.js         |   90 -
 ambari-agent/conf/unix/ambari-agent             |    7 +-
 ambari-agent/pom.xml                            |    3 +
 .../ambari_agent/CustomServiceOrchestrator.py   |   27 +-
 .../TestCustomServiceOrchestrator.py            |   18 +
 .../python/ambari_agent/TestProcessUtils.py     |  224 ++
 .../src/test/python/ambari_agent/TestShell.py   |    5 +-
 .../resource_management/TestContentSources.py   |   11 +-
 .../ambari_commons/credential_store_helper.py   |   45 +
 .../src/main/python/ambari_commons/firewall.py  |    3 +
 .../src/main/python/ambari_commons/network.py   |   39 +
 .../main/python/ambari_commons/process_utils.py |  100 +
 .../src/main/python/ambari_commons/shell.py     |   54 +-
 .../python/resource_management/core/logger.py   |    4 +
 .../python/resource_management/core/shell.py    |    6 +-
 .../python/resource_management/core/source.py   |    8 +-
 .../functions/get_not_managed_resources.py      |   19 +-
 .../HDP/2.1.1/services/STORM/metrics.json       |   10 +-
 .../ambari-infra-solr-plugin/pom.xml            |   56 +
 .../InfraKerberosHostValidator.java             |   54 +
 .../InfraRuleBasedAuthorizationPlugin.java      |  542 +++
 .../InfraUserRolesLookupStrategy.java           |   49 +
 .../InfraKerberosHostValidatorTest.java         |  114 +
 .../InfraRuleBasedAuthorizationPluginTest.java  |  247 ++
 .../InfraUserRolesLookupStrategyTest.java       |   83 +
 .../ambari-logsearch-assembly/pom.xml           |   15 +-
 .../logsearch/common/LogSearchConstants.java    |    1 -
 .../UserConfigRequestQueryConverter.java        |   17 +-
 .../ambari/logsearch/doc/DocConstants.java      |    3 -
 .../logsearch/manager/UserConfigManager.java    |   53 +-
 .../request/UserConfigParamDefinition.java      |    6 -
 .../model/request/impl/UserConfigRequest.java   |   15 +-
 .../model/response/UserConfigData.java          |   11 -
 .../logsearch/rest/UserConfigResource.java      |   17 +-
 .../ambari/logsearch/solr/SolrConstants.java    |    1 -
 .../src/main/webapp/index.html                  |  211 +-
 .../daterangepicker/css/daterangepicker.css     |  415 +++
 .../daterangepicker/js/daterangepicker.js       | 1560 ++++++++
 .../libs/custom/timezone/WorldMapGenerator.js   | 3474 ++++++++++++++++++
 .../libs/custom/timezone/jstz-1.0.4.min.js      |    2 +
 .../daterangepicker/css/daterangepicker.css     |  415 ---
 .../other/daterangepicker/js/daterangepicker.js | 1560 --------
 .../libs/other/timezone/WorldMapGenerator.js    | 3471 -----------------
 .../libs/other/timezone/jstz-1.0.4.min.js       |    2 -
 .../webapp/libs/other/timezone/mapdata.json     | 3158 ----------------
 .../src/main/webapp/scripts/Init.js             |  439 ++-
 .../scripts/views/common/DashboardLayout.js     |   80 -
 .../scripts/views/common/EventHistoryLayout.js  |    1 -
 .../views/dashboard/DashboardLayoutView.js      |  354 --
 .../scripts/views/dashboard/MainLayoutView.js   | 1259 ++++---
 .../views/dialog/ApplySearchFilterView.js       |   41 +-
 .../views/tabs/EventHistoryLayoutView.js        |    1 -
 .../dashboard/DashboardLayoutView_tmpl.html     |   28 -
 .../UserConfigRequestQueryConverterTest.java    |    4 +-
 ambari-logsearch/pom.xml                        |    1 +
 .../sink/flume/FlumeTimelineMetricsSink.java    |    2 +
 .../conf/hadoop-metrics2-hbase.properties.j2    |   49 -
 .../src/main/conf/hadoop-metrics2.properties.j2 |   58 -
 .../metrics/timeline/PhoenixHBaseAccessor.java  |    6 +-
 .../TimelineMetricAppAggregator.java            |    4 +-
 .../TimelineMetricClusterAggregatorSecond.java  |   92 +-
 .../aggregators/TimelineMetricReadHelper.java   |    3 +-
 ...melineMetricClusterAggregatorSecondTest.java |  114 +-
 ambari-server/pom.xml                           |    6 +
 ambari-server/src/main/assemblies/server.xml    |   25 +
 .../server/actionmanager/ActionDBAccessor.java  |    5 +-
 .../actionmanager/ActionDBAccessorImpl.java     |    6 +-
 .../server/actionmanager/ActionScheduler.java   |   53 +-
 .../query/render/ClusterBlueprintRenderer.java  |  122 +
 .../server/controller/ControllerModule.java     |   18 +-
 .../internal/ServiceResourceProvider.java       |    1 -
 .../metrics/RestMetricsPropertyProvider.java    |  106 +-
 .../metrics/timeline/AMSPropertyProvider.java   |    8 +
 .../system/impl/AmbariMetricSinkImpl.java       |   44 +-
 .../server/orm/dao/AlertDefinitionDAO.java      |    7 +-
 .../ambari/server/orm/dao/AlertDispatchDAO.java |   45 +-
 .../apache/ambari/server/orm/dao/ViewDAO.java   |    8 +-
 .../server/orm/entities/AlertGroupEntity.java   |    9 +-
 .../orm/entities/ServiceConfigEntity.java       |    2 +-
 .../orm/entities/ServiceDesiredStateEntity.java |   21 -
 .../org/apache/ambari/server/state/Service.java |    8 -
 .../apache/ambari/server/state/ServiceImpl.java |   40 +-
 .../ambari/server/state/theme/TabLayout.java    |    8 +-
 .../ambari/server/topology/AmbariContext.java   |   36 +-
 .../server/upgrade/UpgradeCatalog250.java       |  116 +-
 .../apache/ambari/server/view/ViewRegistry.java |   62 +
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |    1 -
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |    1 -
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |    1 -
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |    1 -
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |    1 -
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |    1 -
 .../configuration/infra-solr-security-json.xml  |   62 +-
 .../0.1.0/package/scripts/params.py             |   29 +-
 .../properties/infra-solr-security.json.j2      |   46 +
 .../HDP/grafana-solr-cores-dashboard.json       | 3162 ++++++++++++++++
 .../HDP/grafana-solr-hosts-dashboard.json       |  538 +++
 .../AMBARI_METRICS/0.1.0/package/scripts/ams.py |    6 +-
 .../package/scripts/metrics_grafana_util.py     |    6 +-
 .../0.1.0/package/scripts/network.py            |   39 -
 .../0.1.0/package/scripts/params.py             |   38 +-
 .../0.1.0/package/scripts/service_check.py      |    4 +-
 .../hadoop-metrics2-hbase.properties.j2         |    4 +
 .../DRUID/0.9.2/configuration/druid-common.xml  |   16 +-
 .../0.9.2/configuration/druid-superset-env.xml  |  115 +
 .../0.9.2/configuration/druid-superset.xml      |  178 +
 .../common-services/DRUID/0.9.2/metainfo.xml    |   28 +-
 .../DRUID/0.9.2/package/scripts/druid.py        |    2 +
 .../DRUID/0.9.2/package/scripts/druid_node.py   |    2 +-
 .../DRUID/0.9.2/package/scripts/params.py       |   50 +-
 .../0.9.2/package/scripts/status_params.py      |    2 +
 .../DRUID/0.9.2/package/scripts/superset.py     |  153 +
 .../DRUID/0.9.2/package/templates/superset.sh   |   95 +
 .../DRUID/0.9.2/quicklinks/quicklinks.json      |   13 +
 .../DRUID/0.9.2/role_command_order.json         |   18 +
 .../DRUID/0.9.2/themes/theme.json               |   84 +-
 .../hadoop-metrics2.properties.xml              |  125 -
 .../common-services/HDFS/2.1.0.2.0/metainfo.xml |    1 -
 .../package/alerts/alert_metrics_deviation.py   |   14 +-
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |    3 +-
 .../HDFS/3.0.0.3.0/configuration/core-site.xml  |   30 +-
 .../HDFS/3.0.0.3.0/configuration/hadoop-env.xml |   42 +-
 .../hadoop-metrics2.properties.xml              |    2 +-
 .../3.0.0.3.0/configuration/hadoop-policy.xml   |   22 +-
 .../HDFS/3.0.0.3.0/configuration/hdfs-log4j.xml |    2 +-
 .../configuration/hdfs-logsearch-conf.xml       |    6 +-
 .../HDFS/3.0.0.3.0/configuration/hdfs-site.xml  |  109 +-
 .../HDFS/3.0.0.3.0/configuration/ssl-client.xml |   14 +-
 .../HDFS/3.0.0.3.0/configuration/ssl-server.xml |   16 +-
 .../package/alerts/alert_metrics_deviation.py   |   14 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |    2 +-
 .../HDFS/3.0.0.3.0/package/scripts/utils.py     |    8 +-
 .../3.0.0.3.0/package/scripts/zkfc_slave.py     |    1 +
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     |    2 +-
 .../package/scripts/hive_interactive.py         |    6 +-
 .../package/scripts/hive_server_interactive.py  |   12 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |   48 +-
 .../0.9.0/configuration/ranger-kafka-audit.xml  |   32 +-
 .../ranger-kafka-plugin-properties.xml          |   14 +-
 .../ranger-kafka-policymgr-ssl.xml              |   12 +-
 .../configuration/ranger-kafka-security.xml     |   12 +-
 .../ranger-knox-plugin-properties.xml           |   12 +-
 .../0.5.0/configuration/logfeeder-env.xml       |    2 +
 .../0.5.0/configuration/logsearch-env.xml       |    2 +
 .../LOGSEARCH/0.5.0/metainfo.xml                |    6 +
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |   22 +-
 .../0.5.0/package/scripts/setup_logfeeder.py    |   28 +-
 .../0.5.0/package/scripts/setup_logsearch.py    |   22 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |   20 +-
 .../SPARK/1.2.1/package/scripts/params.py       |    1 +
 .../SPARK/1.2.1/package/scripts/setup_livy.py   |    8 +
 .../SPARK2/2.0.0/package/scripts/params.py      |    1 +
 .../SPARK2/2.0.0/package/scripts/setup_livy2.py |    8 +
 .../0.10.0/configuration/ranger-storm-audit.xml |   32 +-
 .../ranger-storm-policymgr-ssl.xml              |   12 +-
 .../configuration/ranger-storm-security.xml     |   12 +-
 .../common-services/STORM/0.10.0/metrics.json   |    8 +-
 .../common-services/STORM/0.9.1/alerts.json     |   30 +-
 .../common-services/STORM/0.9.1/metrics.json    |    8 +-
 .../STORM/0.9.1/quicklinks/quicklinks.json      |   25 +-
 .../common-services/STORM/0.9.3/metrics.json    |    8 +-
 .../ranger-storm-plugin-properties.xml          |   71 -
 .../configuration-mapred/mapred-env.xml         |   14 +-
 .../mapred-logsearch-conf.xml                   |    6 +-
 .../configuration-mapred/mapred-site.xml        |   90 +-
 .../YARN/3.0.0.3.0/configuration/yarn-env.xml   |   26 +-
 .../YARN/3.0.0.3.0/configuration/yarn-log4j.xml |    2 +-
 .../configuration/yarn-logsearch-conf.xml       |    6 +-
 .../YARN/3.0.0.3.0/configuration/yarn-site.xml  |  225 +-
 .../common-services/YARN/3.0.0.3.0/metainfo.xml |    6 +-
 .../YARN/3.0.0.3.0/package/scripts/service.py   |    4 +-
 .../3.0.0.3.0/package/scripts/status_params.py  |   10 +-
 .../YARN/3.0.0.3.0/service_advisor.py           |   23 +-
 .../package/scripts/livy2_config_template.py    |  107 +
 .../0.6.0.2.5/package/scripts/master.py         |   42 +-
 .../0.6.0.2.5/package/scripts/params.py         |    8 +
 .../2.0.6/hooks/before-START/scripts/params.py  |    4 +-
 .../scripts/shared_initialization.py            |   17 +-
 .../ranger-hbase-plugin-properties.xml          |   10 +-
 .../ranger-hdfs-plugin-properties.xml           |   12 +-
 .../ranger-hive-plugin-properties.xml           |   10 +-
 .../ranger-knox-plugin-properties.xml           |    2 +-
 .../services/YARN/configuration/yarn-site.xml   |    4 +-
 .../stacks/HDP/2.2/services/stack_advisor.py    |    8 +-
 .../HBASE/configuration/ranger-hbase-audit.xml  |   32 +-
 .../ranger-hbase-policymgr-ssl.xml              |   12 +-
 .../configuration/ranger-hbase-security.xml     |   14 +-
 .../configuration/ranger-hdfs-policymgr-ssl.xml |   12 +-
 .../HDFS/configuration/ranger-hdfs-security.xml |   14 +-
 .../HIVE/configuration/ranger-hive-audit.xml    |   32 +-
 .../configuration/ranger-hive-policymgr-ssl.xml |   12 +-
 .../HIVE/configuration/ranger-hive-security.xml |   14 +-
 .../ranger-kafka-policymgr-ssl.xml              |    4 +-
 .../KNOX/configuration/ranger-knox-audit.xml    |   32 +-
 .../configuration/ranger-knox-policymgr-ssl.xml |   12 +-
 .../KNOX/configuration/ranger-knox-security.xml |   12 +-
 .../ranger-storm-policymgr-ssl.xml              |    4 +-
 .../configuration/ranger-storm-security.xml     |    2 +-
 .../YARN/configuration/ranger-yarn-audit.xml    |   32 +-
 .../ranger-yarn-plugin-properties.xml           |   12 +-
 .../configuration/ranger-yarn-policymgr-ssl.xml |   12 +-
 .../YARN/configuration/ranger-yarn-security.xml |   12 +-
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |   18 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |   18 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |    3 +
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |   17 +
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |   21 +
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |    3 +
 .../ATLAS/configuration/ranger-atlas-audit.xml  |   18 +-
 .../ranger-atlas-plugin-properties.xml          |   58 +-
 .../ranger-atlas-policymgr-ssl.xml              |   12 +-
 .../configuration/ranger-atlas-security.xml     |   14 +-
 .../ranger-hbase-plugin-properties.xml          |   71 -
 .../ranger-hdfs-plugin-properties.xml           |   50 +-
 .../HIVE/configuration/beeline-log4j2.xml       |    2 +-
 .../services/HIVE/configuration/hive-env.xml    |   29 +
 .../HIVE/configuration/hive-exec-log4j.xml      |  114 +
 .../HIVE/configuration/hive-exec-log4j2.xml     |    2 +-
 .../HIVE/configuration/hive-interactive-env.xml |    8 +-
 .../services/HIVE/configuration/hive-log4j.xml  |  126 +
 .../services/HIVE/configuration/hive-log4j2.xml |    2 +-
 .../HIVE/configuration/llap-cli-log4j2.xml      |    2 +-
 .../HIVE/configuration/llap-daemon-log4j.xml    |    2 +-
 .../ranger-hive-plugin-properties.xml           |   71 -
 .../HIVE/configuration/ranger-hive-security.xml |    2 +-
 .../HIVE/configuration/webhcat-log4j.xml        |   83 +
 .../HDP/2.5/services/HIVE/themes/theme.json     |   48 +
 .../ranger-kafka-plugin-properties.xml          |   71 -
 .../ranger-knox-plugin-properties.xml           |   71 -
 .../ranger-storm-policymgr-ssl.xml              |    4 +-
 .../configuration/ranger-storm-security.xml     |    2 +-
 .../ranger-yarn-plugin-properties.xml           |   71 -
 .../services/YARN/configuration/yarn-site.xml   |    6 +
 .../stacks/HDP/2.5/services/stack_advisor.py    |   72 +-
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  |   27 +
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml |    8 +-
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml |   27 +
 .../stacks/HDP/2.5/upgrades/upgrade-2.5.xml     |    2 +-
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |    4 +
 .../stacks/HDP/2.6/role_command_order.json      |   19 -
 .../ranger-atlas-plugin-properties.xml          |   71 +
 .../stacks/HDP/2.6/services/DRUID/kerberos.json |   52 +-
 .../stacks/HDP/2.6/services/DRUID/metainfo.xml  |    1 +
 .../ranger-hbase-plugin-properties.xml          |   71 +
 .../hadoop-metrics2.properties.xml              |  125 +
 .../ranger-hdfs-plugin-properties.xml           |   70 +
 .../stacks/HDP/2.6/services/HDFS/metainfo.xml   |    3 +
 .../HIVE/configuration/hive-interactive-env.xml |   35 +-
 .../configuration/hive-interactive-site.xml     |   16 +
 .../ranger-hive-plugin-properties.xml           |   71 +
 .../ranger-kafka-plugin-properties.xml          |   71 +
 .../ranger-knox-plugin-properties.xml           |   71 +
 .../services/SPARK/configuration/livy-conf.xml  |   24 +
 .../SPARK2/configuration/livy2-conf.xml         |   24 +
 .../ranger-storm-plugin-properties.xml          |   71 +
 .../ranger-yarn-plugin-properties.xml           |   71 +
 .../services/YARN/configuration/yarn-site.xml   |   14 +-
 .../stacks/HDP/2.6/services/stack_advisor.py    |   23 +
 .../services/YARN/configuration/yarn-site.xml   |   13 +
 .../stacks/HDPWIN/2.2/services/stack_advisor.py |    8 +-
 .../PERF/1.0/hooks/before-ANY/scripts/hook.py   |    9 +
 .../PERF/1.0/hooks/before-ANY/scripts/params.py |   44 +
 .../before-ANY/scripts/shared_initialization.py |   94 +
 .../1.0/hooks/before-INSTALL/scripts/hook.py    |    1 +
 .../1.0/hooks/before-RESTART/scripts/hook.py    |    1 +
 .../PERF/1.0/hooks/before-START/scripts/hook.py |    1 +
 .../AMBARI_METRICS/configuration/ams-site.xml   |   36 +
 .../1.0/services/AMBARI_METRICS/metainfo.xml    |   46 +
 .../PERF/1.0/services/FAKEHBASE/metainfo.xml    |    2 +-
 .../PERF/1.0/services/FAKEHDFS/metainfo.xml     |    1 -
 .../PERF/1.0/services/FAKEZOOKEEPER/alerts.json |    4 +-
 .../1.0/services/FAKEZOOKEEPER/kerberos.json    |    4 +-
 .../1.0/services/FAKEZOOKEEPER/metainfo.xml     |    7 +-
 .../package/scripts/zookeeper_client.py         |    2 +-
 .../package/scripts/zookeeper_server.py         |    2 +-
 .../stacks/PERF/1.0/services/stack_advisor.py   |   37 +
 .../src/main/resources/stacks/stack_advisor.py  |   51 +-
 .../apache/ambari/server/H2DatabaseCleaner.java |   98 +-
 .../actionmanager/TestActionScheduler.java      |  119 +-
 .../render/ClusterBlueprintRendererTest.java    |  143 +
 .../RestMetricsPropertyProviderTest.java        |    4 +-
 .../server/orm/dao/AlertDispatchDAOTest.java    |   20 +-
 .../ambari/server/orm/dao/CrudDAOTest.java      |    5 +-
 .../server/orm/dao/ServiceConfigDAOTest.java    |   13 +-
 .../ambari/server/orm/dao/UpgradeDAOTest.java   |   16 +-
 .../ComponentVersionCheckActionTest.java        |   29 +-
 .../upgrades/UpgradeActionTest.java             |   74 +-
 .../ambari/server/state/ConfigHelperTest.java   |   14 +-
 .../state/cluster/AlertDataManagerTest.java     |    6 +-
 .../AlertNoticeDispatchServiceTest.java         |  159 +-
 .../server/topology/AmbariContextTest.java      |   69 +-
 .../server/upgrade/UpgradeCatalog250Test.java   |  291 +-
 .../AMBARI_METRICS/test_metrics_collector.py    |    6 +-
 .../AMBARI_METRICS/test_metrics_grafana.py      |    7 +-
 .../2.0.6/HDFS/test_alert_metrics_deviation.py  |    2 +
 .../stacks/2.0.6/HIVE/test_hive_client.py       |    4 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |    6 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |    4 +-
 .../stacks/2.2/common/test_stack_advisor.py     |    7 +-
 .../stacks/2.4/LOGSEARCH/test_logfeeder.py      |   23 +-
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |   19 +-
 .../test/python/stacks/2.4/configs/default.json |    1 +
 .../stacks/2.5/HIVE/test_hive_server_int.py     |   12 +-
 .../python/stacks/2.5/SPARK/test_spark_livy.py  |   31 +
 .../stacks/2.5/common/test_stack_advisor.py     |  320 +-
 .../test/python/stacks/2.6/DRUID/test_druid.py  |   14 +-
 .../stacks/2.6/SPARK2/test_spark_livy2.py       |   31 +
 .../test/python/stacks/2.6/configs/default.json |    6 +-
 ambari-server/src/test/python/unitTests.py      |  106 +-
 .../HDP/2.1.1/services/STORM/metrics.json       |   10 +-
 .../rangerAdmin/step3_controller.js             |   47 +-
 .../rangerAdmin/step4_controller.js             |   68 +-
 .../rangerAdmin/wizard_controller.js            |   45 +-
 ambari-web/app/controllers/main/host/details.js |   69 +-
 ambari-web/app/controllers/main/service.js      |    2 +-
 .../controllers/main/service/info/configs.js    |   13 +-
 .../controllers/main/service/info/summary.js    |    2 +-
 ambari-web/app/controllers/main/service/item.js |   82 +-
 .../service/manage_config_groups_controller.js  |   10 +-
 .../app/mappers/component_config_mapper.js      |   68 +-
 .../app/mappers/components_state_mapper.js      |    5 +-
 .../app/mappers/configs/config_groups_mapper.js |    2 +-
 ambari-web/app/mappers/server_data_mapper.js    |    1 +
 ambari-web/app/messages.js                      |    3 +-
 .../app/mixins/common/configs/configs_loader.js |    2 +-
 .../app/mixins/common/track_request_mixin.js    |   36 +-
 ambari-web/app/models/client_component.js       |    1 +
 ambari-web/app/models/host_component.js         |   17 +-
 ambari-web/app/models/service.js                |   21 +-
 ambari-web/app/router.js                        |   37 +-
 ambari-web/app/routes/main.js                   |   15 +-
 .../app/routes/ra_high_availability_routes.js   |    8 +-
 .../modal_popups/cluster_check_dialog.hbs       |   12 +-
 .../templates/main/admin/service_auto_start.hbs |   28 +-
 .../app/templates/main/dashboard/widgets.hbs    |   10 +-
 ambari-web/app/utils/ajax/ajax.js               |    2 +-
 .../views/common/configs/config_history_flow.js |    8 +-
 .../common/modal_popups/cluster_check_popup.js  |   68 +-
 .../app/views/main/admin/service_auto_start.js  |    8 +-
 ambari-web/app/views/main/dashboard/widget.js   |   11 +-
 ambari-web/app/views/main/dashboard/widgets.js  |  116 +-
 ambari-web/app/views/main/host.js               |    1 +
 ambari-web/app/views/main/service/menu.js       |    4 +-
 .../rangerAdmin/step3_controller_test.js        |   44 +-
 .../main/host/configs_service_test.js           |    2 +
 .../test/controllers/main/host/details_test.js  |    7 +
 .../main/service/info/config_test.js            |   87 +-
 .../test/controllers/main/service/item_test.js  |  100 +-
 .../test/controllers/main/service_test.js       |   31 +-
 .../common/configs/configs_loader_test.js       |    7 +-
 ambari-web/test/models/service_test.js          |   66 +-
 .../modal_popups/cluster_check_popup_test.js    |   48 +-
 .../views/main/admin/service_auto_start_test.js |    2 +
 .../test/views/main/dashboard/widget_test.js    |   21 +-
 .../test/views/main/dashboard/widgets_test.js   |  161 +-
 ambari-web/test/views/main/host_test.js         |   18 +-
 contrib/utils/perf/deploy-gce-perf-cluster.py   |    5 +-
 .../main/resources/ui/app/controllers/queue.js  |  103 +-
 .../main/resources/ui/app/controllers/queues.js |    2 +-
 .../src/main/resources/ui/app/models/queue.js   |   11 +
 .../src/main/resources/ui/app/serializers.js    |    5 +
 .../src/main/resources/ui/app/store.js          |    9 +
 .../main/resources/ui/app/templates/queue.hbs   |   55 +-
 .../view/hive2/resources/uploads/CSVParams.java |    2 +-
 .../hive2/resources/uploads/UploadService.java  |    2 +-
 .../resources/uploads/query/QueryGenerator.java |    1 -
 .../resources/uploads/query/RowFormat.java      |    2 +-
 .../resources/uploads/query/TableInfo.java      |    2 +-
 .../resources/upload/QueryGeneratorTest.java    |    2 +-
 .../apache/ambari/view/hive20/DataMigrator.java |   19 +-
 .../view/hive20/internal/dto/ColumnInfo.java    |   32 +-
 .../view/hive20/internal/dto/TableStats.java    |   11 +
 .../internal/parsers/TableMetaParserImpl.java   |    8 +
 .../generators/AlterTableQueryGenerator.java    |   82 +-
 .../CreateDatabaseQueryGenerator.java           |   44 +
 .../DeleteDatabaseQueryGenerator.java           |    2 +-
 .../generators/InsertFromQueryGenerator.java    |   77 +
 .../query/generators/QueryGenerationUtils.java  |    5 +-
 .../view/hive20/resources/browser/DDLProxy.java |   14 +-
 .../hive20/resources/browser/DDLService.java    |   51 +-
 .../view/hive20/resources/jobs/JobService.java  |    1 +
 .../resources/system/ranger/RangerService.java  |   32 +-
 .../hive20/resources/uploads/CSVParams.java     |    2 +-
 .../resources/uploads/TableDataReader.java      |    5 +-
 .../hive20/resources/uploads/TableInput.java    |   51 -
 .../resources/uploads/UploadFromHdfsInput.java  |    8 +-
 .../hive20/resources/uploads/UploadService.java |  107 +-
 .../resources/uploads/parsers/Parser.java       |   11 +-
 .../resources/uploads/parsers/PreviewData.java  |    9 +-
 .../uploads/query/InsertFromQueryInput.java     |   10 +-
 .../resources/uploads/query/QueryGenerator.java |  143 -
 .../resources/uploads/query/RowFormat.java      |   57 -
 .../resources/uploads/query/TableInfo.java      |   97 -
 .../resources/ui/app/adapters/application.js    |   33 +-
 .../main/resources/ui/app/adapters/database.js  |    8 +
 .../resources/ui/app/adapters/file-resource.js  |   26 +
 .../resources/ui/app/adapters/file-uploader.js  |   28 +
 .../src/main/resources/ui/app/adapters/query.js |    5 +
 .../resources/ui/app/adapters/saved-query.js    |    2 +-
 .../src/main/resources/ui/app/adapters/table.js |   15 +
 .../src/main/resources/ui/app/adapters/udf.js   |   39 +
 .../resources/ui/app/adapters/upload-table.js   |   93 +
 .../resources/ui/app/components/column-item.js  |    1 +
 .../ui/app/components/create-database-form.js   |   59 +
 .../resources/ui/app/components/create-table.js |    6 +-
 .../ui/app/components/csv-format-params.js      |   76 +
 .../resources/ui/app/components/edit-table.js   |  220 ++
 .../ui/app/components/fileresource-item.js      |   73 +
 .../resources/ui/app/components/jobs-browser.js |   39 +-
 .../ui/app/components/property-item.js          |    1 +
 .../resources/ui/app/components/query-editor.js |   10 +-
 .../ui/app/components/query-result-table.js     |    4 +
 .../resources/ui/app/components/radio-button.js |   40 +
 .../resources/ui/app/components/simple-table.js |   22 +
 .../app/components/table-advanced-settings.js   |   10 +
 .../ui/app/components/table-columns.js          |    3 +-
 .../ui/app/components/table-properties.js       |    3 +-
 .../ui/app/components/table-rename-form.js      |   63 +
 .../ui/app/components/table-statistics.js       |    2 +-
 .../resources/ui/app/components/udf-edit.js     |   40 +
 .../resources/ui/app/components/udf-item.js     |  154 +
 .../main/resources/ui/app/components/udf-new.js |   41 +
 .../ui/app/components/upload-table-source.js    |   48 +
 .../resources/ui/app/components/upload-table.js |   60 +
 .../ui/app/components/validated-text-field.js   |   62 +
 .../ui/app/components/visual-explain-detail.js  |   31 +
 .../ui/app/components/visual-explain.js         |   89 +
 .../resources/ui/app/configs/edit-table-tabs.js |   48 +
 .../resources/ui/app/configs/file-format.js     |    4 +-
 .../main/resources/ui/app/configs/helpers.js    |   14 +-
 .../resources/ui/app/controllers/application.js |    5 +-
 .../resources/ui/app/controllers/messages.js    |   30 +
 .../ui/app/controllers/messages/message.js      |   31 +
 .../resources/ui/app/controllers/saved-query.js |   22 +
 .../main/resources/ui/app/controllers/udfs.js   |   22 +
 .../resources/ui/app/controllers/udfs/new.js    |   23 +
 .../app/helpers/alert-message-context-class.js  |   27 +
 .../ui/app/helpers/alert-message-icon-class.js  |   37 +
 .../resources/ui/app/helpers/shorten-text.js    |   32 +
 .../resources/ui/app/locales/en/translations.js |  111 +
 .../main/resources/ui/app/mixins/ui-logger.js   |   15 +
 .../src/main/resources/ui/app/models/column.js  |   33 +-
 .../resources/ui/app/models/file-resource.js    |   25 +
 .../src/main/resources/ui/app/models/udf.js     |   26 +
 .../hive20/src/main/resources/ui/app/router.js  |   12 +-
 .../main/resources/ui/app/routes/application.js |    3 +-
 .../main/resources/ui/app/routes/databases.js   |   16 +-
 .../databases/database/tables/new-database.js   |   65 +
 .../app/routes/databases/database/tables/new.js |   84 +-
 .../routes/databases/database/tables/table.js   |    8 +-
 .../databases/database/tables/table/edit.js     |   82 +
 .../databases/database/tables/table/rename.js   |   80 +
 .../databases/database/tables/upload-table.js   |  784 ++++
 .../src/main/resources/ui/app/routes/jobs.js    |    3 -
 .../resources/ui/app/routes/queries/query.js    |  136 +-
 .../src/main/resources/ui/app/routes/query.js   |  305 --
 .../resources/ui/app/routes/savedqueries.js     |   50 +-
 .../main/resources/ui/app/routes/settings.js    |   10 +-
 .../src/main/resources/ui/app/routes/udfs.js    |   50 +
 .../main/resources/ui/app/routes/udfs/new.js    |  161 +
 .../resources/ui/app/services/alert-messages.js |   13 +-
 .../resources/ui/app/services/file-resource.js  |   33 +
 .../src/main/resources/ui/app/services/jobs.js  |    3 +
 .../src/main/resources/ui/app/services/query.js |   14 +-
 .../resources/ui/app/services/saved-queries.js  |    5 +-
 .../ui/app/services/table-operations.js         |   54 +-
 .../src/main/resources/ui/app/services/udf.js   |   76 +
 .../src/main/resources/ui/app/styles/app.scss   |  202 +-
 .../app/templates/components/alert-message.hbs  |    2 +-
 .../ui/app/templates/components/column-item.hbs |   15 +-
 .../app/templates/components/confirm-dialog.hbs |    2 +-
 .../components/create-database-form.hbs         |   31 +
 .../templates/components/csv-format-params.hbs  |  130 +
 .../ui/app/templates/components/edit-table.hbs  |   65 +
 .../app/templates/components/export-result.hbs  |    2 +-
 .../templates/components/fileresource-item.hbs  |   32 +
 .../templates/components/hdfs-viewer-modal.hbs  |    2 +-
 .../ui/app/templates/components/info-dialog.hbs |    2 +-
 .../ui/app/templates/components/job-item.hbs    |    2 +-
 .../app/templates/components/jobs-browser.hbs   |   17 +-
 .../ui/app/templates/components/list-item.hbs   |    2 +-
 .../app/templates/components/property-item.hbs  |   10 +-
 .../templates/components/query-result-table.hbs |    4 +-
 .../app/templates/components/radio-button.hbs   |   19 +
 .../app/templates/components/simple-table.hbs   |   42 +
 .../components/table-advanced-settings.hbs      |  297 +-
 .../app/templates/components/table-columns.hbs  |    1 +
 .../templates/components/table-properties.hbs   |    1 +
 .../templates/components/table-rename-form.hbs  |   31 +
 .../templates/components/table-statistics.hbs   |    4 +
 .../ui/app/templates/components/udf-edit.hbs    |   67 +
 .../ui/app/templates/components/udf-item.hbs    |   76 +
 .../ui/app/templates/components/udf-new.hbs     |   61 +
 .../components/upload-table-source.hbs          |  112 +
 .../app/templates/components/upload-table.hbs   |   59 +
 .../components/validated-text-field.hbs         |   23 +
 .../components/visual-explain-detail.hbs        |   29 +
 .../app/templates/components/visual-explain.hbs |   42 +
 .../ui/app/templates/databases-loading.hbs      |   21 +
 .../resources/ui/app/templates/databases.hbs    |    4 +-
 .../databases/database/tables-loading.hbs       |   24 +
 .../databases/database/tables/new-database.hbs  |   47 +
 .../templates/databases/database/tables/new.hbs |    6 +-
 .../databases/database/tables/table-loading.hbs |   21 +
 .../databases/database/tables/table.hbs         |    7 +-
 .../databases/database/tables/table/auth.hbs    |   41 +-
 .../databases/database/tables/table/edit.hbs    |   45 +
 .../databases/database/tables/table/rename.hbs  |   46 +
 .../databases/database/tables/upload-table.hbs  |   45 +
 .../resources/ui/app/templates/jobs-loading.hbs |   20 +
 .../resources/ui/app/templates/messages.hbs     |   14 +-
 .../ui/app/templates/messages/message.hbs       |    6 +-
 .../ui/app/templates/queries/query.hbs          |   69 +-
 .../main/resources/ui/app/templates/query.hbs   |   84 -
 .../ui/app/templates/savedqueries-loading.hbs   |   21 +
 .../resources/ui/app/templates/savedqueries.hbs |    6 +-
 .../ui/app/templates/service-check.hbs          |    2 +-
 .../ui/app/templates/settings-loading.hbs       |   21 +
 .../resources/ui/app/templates/udfs-loading.hbs |   21 +
 .../main/resources/ui/app/templates/udfs.hbs    |   28 +
 .../resources/ui/app/templates/udfs/new.hbs     |   30 +
 .../main/resources/ui/app/utils/constants.js    |   64 +
 .../resources/ui/app/utils/hive-explainer.js    |  645 ++++
 .../hive20/src/main/resources/ui/bower.json     |    5 +-
 .../src/main/resources/ui/config/environment.js |   10 +
 .../src/main/resources/ui/ember-cli-build.js    |    3 +
 .../hive20/src/main/resources/ui/package.json   |    3 +
 .../views/hive20/src/main/resources/view.xml    |    2 +-
 .../AlterTableQueryGenerationSpecTest.groovy    |   59 -
 .../AlterTableQueryGeneratorTest.java           |  161 +-
 .../resources/upload/DataParserCSVTest.java     |   29 +-
 .../resources/upload/DataParserJSONTest.java    |   37 +-
 .../resources/upload/DataParserXMLTest.java     |   20 +-
 .../resources/upload/QueryGeneratorTest.java    |  108 -
 .../resources/upload/TableDataReaderTest.java   |    5 +-
 ...HiveHistoryQueryMigrationImplementation.java |    6 +-
 .../ui/app/components/bundle-config.js          |    1 +
 .../resources/ui/app/components/coord-config.js |    1 +
 .../ui/app/components/decision-add-branch.js    |    3 +-
 .../ui/app/components/designer-workspace.js     |   37 +-
 .../resources/ui/app/components/drafts-wf.js    |   55 +-
 .../ui/app/components/flow-designer.js          |   27 +-
 .../resources/ui/app/components/job-details.js  |   52 +-
 .../ui/app/components/transition-config.js      |    2 +-
 .../ui/app/domain/cytoscape-flow-renderer.js    |   38 +-
 .../resources/ui/app/domain/findnode-mixin.js   |   10 +-
 .../resources/ui/app/domain/node-handler.js     |   10 +-
 .../src/main/resources/ui/app/domain/node.js    |    2 +-
 .../main/resources/ui/app/domain/workflow.js    |    6 +-
 .../src/main/resources/ui/app/routes/design.js  |    7 -
 .../src/main/resources/ui/app/styles/app.less   |   41 +-
 .../templates/components/designer-workspace.hbs |   73 +-
 .../ui/app/templates/components/drafts-wf.hbs   |   19 +-
 .../app/templates/components/flow-designer.hbs  |    2 +-
 .../ui/app/templates/components/help-icon.hbs   |    2 +-
 .../resources/ui/app/templates/dashboard.hbs    |    2 +-
 .../main/resources/ui/app/utils/constants.js    |    4 +-
 docs/pom.xml                                    |   16 +-
 pom.xml                                         |    4 +
 560 files changed, 25349 insertions(+), 14799 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/341cb124/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/341cb124/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/341cb124/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/341cb124/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/341cb124/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/341cb124/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/341cb124/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
----------------------------------------------------------------------


[25/50] [abbrv] ambari git commit: AMBARI-19962 Clicking on the login button (or hitting page refresh) to see the dashboard takes a while on a 1000-node cluster. (atkach)

Posted by nc...@apache.org.
AMBARI-19962 Clicking on the login button (or hitting page refresh) to see the dashboard takes a while on a 1000-node cluster. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e8a99618
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e8a99618
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e8a99618

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: e8a9961841e0eaac2b471526068afabf37d10a35
Parents: fe1704e
Author: Andrii Tkach <at...@apache.org>
Authored: Fri Feb 10 17:59:58 2017 +0200
Committer: Andrii Tkach <at...@apache.org>
Committed: Fri Feb 10 21:37:29 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/router.js          | 37 ++++++++++++++++++++++++++--------
 ambari-web/app/utils/ajax/ajax.js |  2 +-
 2 files changed, 30 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e8a99618/ambari-web/app/router.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/router.js b/ambari-web/app/router.js
index d671e86..df54303 100644
--- a/ambari-web/app/router.js
+++ b/ambari-web/app/router.js
@@ -256,16 +256,35 @@ App.Router = Em.Router.extend({
 
   displayLoginName: Em.computed.truncate('loginName', 10, 10),
 
+  /**
+   * @type {$.ajax|null}
+   */
+  clusterDataRequest: null,
+
+  /**
+   * If request was already sent on login then use saved clusterDataRequest and don't make second call
+   * @returns {$.ajax}
+   */
+  getClusterDataRequest: function() {
+    var clusterDataRequest = this.get('clusterDataRequest');
+    if (clusterDataRequest) {
+      this.set('clusterDataRequest', null);
+      return clusterDataRequest;
+    } else {
+      return App.ajax.send({
+        name: 'router.login.clusters',
+        sender: this,
+        success: 'onAuthenticationSuccess',
+        error: 'onAuthenticationError'
+      });
+    }
+  },
+
   getAuthenticated: function () {
     var dfd = $.Deferred();
     var self = this;
     var auth = App.db.getAuthenticated();
-    App.ajax.send({
-      name: 'router.login.clusters',
-      sender: this,
-      success: 'onAuthenticationSuccess',
-      error: 'onAuthenticationError'
-    }).complete(function (xhr) {
+    this.getClusterDataRequest().complete(function (xhr) {
       if (xhr.state() === 'resolved') {
         // if server knows the user and user authenticated by UI
         if (auth) {
@@ -535,12 +554,12 @@ App.Router = Em.Router.extend({
       this.loginGetClustersSuccessCallback(self.get('clusterData'), {}, requestData);
     }
     else {
-      App.ajax.send({
+      this.set('clusterDataRequest', App.ajax.send({
         name: 'router.login.clusters',
         sender: self,
         data: requestData,
         success: 'loginGetClustersSuccessCallback'
-      });
+      }));
     }
   },
 
@@ -584,6 +603,8 @@ App.Router = Em.Router.extend({
         router.transitionToAdminView();
       }
     }
+    // set cluster name and security type
+    App.router.get('clusterController').reloadSuccessCallback(clustersData);
     App.set('isPermissionDataLoaded', true);
     App.router.get('loginController').postLogin(true, true);
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/e8a99618/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index e344128..ae3947f 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -2290,7 +2290,7 @@ var urls = {
     mock: '/data/users/privileges_{userName}.json'
   },
   'router.login.clusters': {
-    'real': '/clusters?fields=Clusters/provisioning_state',
+    'real': '/clusters?fields=Clusters/provisioning_state,Clusters/security_type',
     'mock': '/data/clusters/info.json'
   },
   'router.login.message': {


[45/50] [abbrv] ambari git commit: AMBARI-19982 Admin View: issues with Users/Groups table filters. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-19982 Admin View: issues with Users/Groups table filters. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a6445ac8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a6445ac8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a6445ac8

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: a6445ac838cc2b54e5ee5f943d4fa22a72fc7ad3
Parents: a7eafd5
Author: ababiichuk <ab...@hortonworks.com>
Authored: Mon Feb 13 13:56:29 2017 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Mon Feb 13 14:45:50 2017 +0200

----------------------------------------------------------------------
 .../controllers/clusters/UserAccessListCtrl.js  |  5 +-
 .../clusters/UserAccessListCtrl_test.js         | 90 --------------------
 2 files changed, 1 insertion(+), 94 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a6445ac8/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js
index 3737414..5dc6bd3 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js
@@ -314,8 +314,6 @@ function($scope, $location, Cluster, $modal, $rootScope, $routeParams, Permissio
 
   $scope.clearFilters = function() {
     $scope.currentNameFilter = '';
-    $scope.isUserActive = true;
-    $scope.currentTypeFilter = $scope.typeFilterOptions[0];
     $scope.currentRoleFilter = $scope.roleFilterOptions[0];
     $scope.resetPagination();
   };
@@ -325,8 +323,7 @@ function($scope, $location, Cluster, $modal, $rootScope, $routeParams, Permissio
 
   $scope.$watch(
     function (scope) {
-      return Boolean(scope.currentNameFilter || (scope.currentTypeFilter && scope.currentTypeFilter.value)
-        || (scope.currentRoleFilter && scope.currentRoleFilter.value));
+      return Boolean(scope.currentNameFilter || (scope.currentRoleFilter && scope.currentRoleFilter.value));
     },
     function (newValue, oldValue, scope) {
       scope.isNotEmptyFilter = newValue;

http://git-wip-us.apache.org/repos/asf/ambari/blob/a6445ac8/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/clusters/UserAccessListCtrl_test.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/clusters/UserAccessListCtrl_test.js b/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/clusters/UserAccessListCtrl_test.js
index 42fb453..14c0975 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/clusters/UserAccessListCtrl_test.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/clusters/UserAccessListCtrl_test.js
@@ -77,10 +77,6 @@ describe('#Cluster', function () {
             value: 'CLUSTER.USER'
           }
         ];
-        scope.typeFilterOptions = [
-          {label: $t('common.user'), value: 'USER'},
-          {label: $t('common.group'), value: 'GROUP'}
-        ];
         scope.currentRoleFilter = scope.roleFilterOptions[1];
         scope.clearFilters();
         expect(scope.currentNameFilter).toEqual('');
@@ -98,16 +94,12 @@ describe('#Cluster', function () {
       var cases = [
         {
           currentNameFilter: '',
-          currentTypeFilter: null,
           currentRoleFilter: null,
           isNotEmptyFilter: false,
           title: 'no filters'
         },
         {
           currentNameFilter: '',
-          currentTypeFilter: {
-            value: ''
-          },
           currentRoleFilter: {
             value: ''
           },
@@ -116,9 +108,6 @@ describe('#Cluster', function () {
         },
         {
           currentNameFilter: 'a',
-          currentTypeFilter: {
-            value: ''
-          },
           currentRoleFilter: {
             value: ''
           },
@@ -127,9 +116,6 @@ describe('#Cluster', function () {
         },
         {
           currentNameFilter: '0',
-          currentTypeFilter: {
-            value: ''
-          },
           currentRoleFilter: {
             value: ''
           },
@@ -138,20 +124,6 @@ describe('#Cluster', function () {
         },
         {
           currentNameFilter: '',
-          currentTypeFilter: {
-            value: 'GROUP'
-          },
-          currentRoleFilter: {
-            value: ''
-          },
-          isNotEmptyFilter: true,
-          title: 'type filter'
-        },
-        {
-          currentNameFilter: '',
-          currentTypeFilter: {
-            value: ''
-          },
           currentRoleFilter: {
             value: 'CLUSTER.USER'
           },
@@ -160,64 +132,6 @@ describe('#Cluster', function () {
         },
         {
           currentNameFilter: 'a',
-          currentTypeFilter: {
-            value: 'GROUP'
-          },
-          currentRoleFilter: {
-            value: ''
-          },
-          isNotEmptyFilter: true,
-          title: 'name and type filters'
-        },
-        {
-          currentNameFilter: 'a',
-          currentTypeFilter: {
-            value: ''
-          },
-          currentRoleFilter: {
-            value: 'CLUSTER.USER'
-          },
-          isNotEmptyFilter: true,
-          title: 'name and role filters'
-        },
-        {
-          currentNameFilter: '0',
-          currentTypeFilter: {
-            value: 'GROUP'
-          },
-          currentRoleFilter: {
-            value: ''
-          },
-          isNotEmptyFilter: true,
-          title: 'name and type filters with "0" as string'
-        },
-        {
-          currentNameFilter: '0',
-          currentTypeFilter: {
-            value: ''
-          },
-          currentRoleFilter: {
-            value: 'CLUSTER.USER'
-          },
-          isNotEmptyFilter: true,
-          title: 'name and role filters with "0" as string'
-        },
-        {
-          currentNameFilter: '',
-          currentTypeFilter: {
-            value: 'GROUP'
-          },
-          currentRoleFilter: {
-            value: 'CLUSTER.USER'
-          },
-          isNotEmptyFilter: true,
-          title: 'type and role filters'
-        },
-        {
-          currentNameFilter: 'a',
-          currentTypeFilter: {
-            value: 'CLUSTER.USER'
-          },
           currentRoleFilter: {
             value: 'GROUP'
           },
@@ -226,9 +140,6 @@ describe('#Cluster', function () {
         },
         {
           currentNameFilter: '0',
-          currentTypeFilter: {
-            value: 'CLUSTER.USER'
-          },
           currentRoleFilter: {
             value: 'GROUP'
           },
@@ -241,7 +152,6 @@ describe('#Cluster', function () {
         it(item.title, function () {
           scope.currentNameFilter = item.currentNameFilter;
           scope.currentRoleFilter = item.currentRoleFilter;
-          scope.currentTypeFilter = item.currentTypeFilter;
           scope.$digest();
           expect(scope.isNotEmptyFilter).toEqual(item.isNotEmptyFilter);
         });


[08/50] [abbrv] ambari git commit: AMBARI-19909. Export Blueprints does not contain the settings object and hence the credential store values (Madhuvanthi Radhakrishnan via smohanty)

Posted by nc...@apache.org.
AMBARI-19909. Export Blueprints does not contain the settings object and hence the credential store values (Madhuvanthi Radhakrishnan via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bc806659
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bc806659
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bc806659

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: bc806659971c05b873aa769007afa2916b2041dc
Parents: 00ed415
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Thu Feb 9 18:39:22 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Thu Feb 9 18:58:34 2017 -0800

----------------------------------------------------------------------
 .../query/render/ClusterBlueprintRenderer.java  | 124 ++++++++++++++++
 .../render/ClusterBlueprintRendererTest.java    | 143 +++++++++++++++++++
 .../server/upgrade/UpgradeCatalog250Test.java   |  13 +-
 3 files changed, 274 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bc806659/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
index 342df44..4091ee8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
@@ -109,6 +109,20 @@ public class ClusterBlueprintRenderer extends BaseRenderer implements Renderer {
     if (resultTree.getChild(serviceType) == null) {
       resultTree.addChild(new HashSet<String>(), serviceType);
     }
+    TreeNode<Set<String>> serviceNode = resultTree.getChild(serviceType);
+    if (serviceNode == null) {
+      serviceNode = resultTree.addChild(new HashSet<String>(), serviceType);
+    }
+    String serviceComponentType = Resource.Type.Component.name();
+    TreeNode<Set<String>> serviceComponentNode = resultTree.getChild(
+      serviceType + "/" + serviceComponentType);
+    if (serviceComponentNode == null) {
+      serviceComponentNode = serviceNode.addChild(new HashSet<String>(), serviceComponentType);
+    }
+    serviceComponentNode.getObject().add("ServiceComponentInfo/cluster_name");
+    serviceComponentNode.getObject().add("ServiceComponentInfo/service_name");
+    serviceComponentNode.getObject().add("ServiceComponentInfo/component_name");
+    serviceComponentNode.getObject().add("ServiceComponentInfo/recovery_enabled");
 
     String hostType = Resource.Type.Host.name();
     String hostComponentType = Resource.Type.HostComponent.name();
@@ -214,9 +228,119 @@ public class ClusterBlueprintRenderer extends BaseRenderer implements Renderer {
 
     blueprintResource.setProperty("configurations", processConfigurations(topology));
 
+    //Fetch settings section for blueprint
+    blueprintResource.setProperty("settings", getSettings(clusterNode));
+
     return blueprintResource;
   }
 
+  /***
+   * Constructs the Settings object of the following form:
+   * "settings": [   {
+   "recovery_settings": [
+   {
+   "recovery_enabled": "true"
+   }   ]   },
+   {
+   "service_settings": [   {
+   "name": "HDFS",
+   "recovery_enabled": "true",
+   "credential_store_enabled": "true"
+   },
+   {
+   "name": "TEZ",
+   "recovery_enabled": "false"
+   },
+   {
+   "name": "HIVE",
+   "recovery_enabled": "false"
+   }   ]   },
+   {
+   "component_settings": [   {
+   "name": "DATANODE",
+   "recovery_enabled": "true"
+   }   ]   }   ]
+   *
+   * @param clusterNode
+   * @return A Collection<Map<String, Object>> which represents the Setting Object
+   */
+  private Collection<Map<String, Object>> getSettings(TreeNode<Resource> clusterNode) {
+    LOG.info("ClusterBlueprintRenderer: getSettings()");
+
+    //Initialize collections to create appropriate json structure
+    Collection<Map<String, Object>> blueprintSetting = new ArrayList<Map<String, Object>>();
+
+    Set<Map<String, String>> recoverySettingValue = new HashSet<Map<String, String>>();
+    Set<Map<String, String>> serviceSettingValue = new HashSet<Map<String, String>>();
+    Set<Map<String, String>> componentSettingValue = new HashSet<Map<String, String>>();
+
+    HashMap<String, String> property = new HashMap<>();
+    HashMap<String, String> componentProperty = new HashMap<>();
+    Boolean globalRecoveryEnabled = false;
+
+    //Fetch the services, to obtain ServiceInfo and ServiceComponents
+    Collection<TreeNode<Resource>> serviceChildren = clusterNode.getChild("services").getChildren();
+    for (TreeNode serviceNode : serviceChildren) {
+      ResourceImpl service = (ResourceImpl) serviceNode.getObject();
+      Map<String, Object> ServiceInfoMap = service.getPropertiesMap().get("ServiceInfo");
+
+      //service_settings population
+      property = new HashMap<>();
+      if (ServiceInfoMap.get("credential_store_supported").equals("true")) {
+        if (ServiceInfoMap.get("credential_store_enabled").equals("true")) {
+          property.put("name", ServiceInfoMap.get("service_name").toString());
+          property.put("credential_store_enabled", "true");
+        }
+      }
+
+      //Fetch the service Components to obtain ServiceComponentInfo
+      Collection<TreeNode<Resource>> componentChildren = serviceNode.getChild("components").getChildren();
+      for (TreeNode componentNode : componentChildren) {
+        ResourceImpl component = (ResourceImpl) componentNode.getObject();
+        Map<String, Object> ServiceComponentInfoMap = component.getPropertiesMap().get("ServiceComponentInfo");
+
+        if (ServiceComponentInfoMap.get("recovery_enabled").equals("true")) {
+          globalRecoveryEnabled = true;
+          property.put("name", ServiceInfoMap.get("service_name").toString());
+          property.put("recovery_enabled", "true");
+
+          //component_settings population
+          componentProperty = new HashMap<>();
+          componentProperty.put("name", ServiceComponentInfoMap.get("component_name").toString());
+          componentProperty.put("recovery_enabled", "true");
+        }
+      }
+
+      if (!property.isEmpty())
+        serviceSettingValue.add(property);
+      if (!componentProperty.isEmpty())
+        componentSettingValue.add(componentProperty);
+    }
+    //recovery_settings population
+    property = new HashMap<>();
+    if (globalRecoveryEnabled) {
+      property.put("recovery_enabled", "true");
+    } else {
+      property.put("recovery_enabled", "false");
+    }
+    recoverySettingValue.add(property);
+
+    //Add all the different setting values.
+    Map<String, Object> settingMap = new HashMap<>();
+    settingMap.put("recovery_settings", recoverySettingValue);
+    blueprintSetting.add(settingMap);
+
+    settingMap = new HashMap<>();
+    settingMap.put("service_settings", serviceSettingValue);
+    blueprintSetting.add(settingMap);
+
+    settingMap = new HashMap<>();
+    settingMap.put("component_settings", componentSettingValue);
+    blueprintSetting.add(settingMap);
+
+    return blueprintSetting;
+  }
+
   private Map<String, Object> getKerberosDescriptor(ClusterController clusterController, String clusterName) throws AmbariException {
     PredicateBuilder pb = new PredicateBuilder();
     Predicate predicate = pb.begin().property("Artifacts/cluster_name").equals(clusterName).and().

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc806659/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
index 19a6b5c..75ffd31 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
@@ -34,6 +34,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -238,6 +239,148 @@ public class ClusterBlueprintRendererTest {
     assertTrue(propertyTree.getChild("Host/HostComponent").getObject().contains("HostRoles/component_name"));
   }
 
+  public TreeNode<Resource> createResultTreeSettingsObject(TreeNode<Resource> resultTree){
+    Resource clusterResource = new ResourceImpl(Resource.Type.Cluster);
+
+    clusterResource.setProperty("Clusters/cluster_name", "testCluster");
+    clusterResource.setProperty("Clusters/version", "HDP-1.3.3");
+
+    TreeNode<Resource> clusterTree = resultTree.addChild(clusterResource, "Cluster:1");
+
+    TreeNode<Resource> servicesTree = clusterTree.addChild(null, "services");
+    servicesTree.setProperty("isCollection", "true");
+
+    //Scenario 1 : Service with Credential Store enabled, Recovery enabled for Component:1 and not for Component:2
+    Resource serviceResource1 = new ResourceImpl(Resource.Type.Service);
+    serviceResource1.setProperty("ServiceInfo/service_name","Service:1");
+    serviceResource1.setProperty("ServiceInfo/credential_store_supported","true");
+    serviceResource1.setProperty("ServiceInfo/credential_store_enabled","true");
+    TreeNode<Resource> serviceTree = servicesTree.addChild(serviceResource1, "Service:1");
+
+    Resource ttComponentResource = new ResourceImpl(Resource.Type.Component);
+    ttComponentResource.setProperty("ServiceComponentInfo/component_name", "Component:1");
+    ttComponentResource.setProperty("ServiceComponentInfo/cluster_name", "testCluster");
+    ttComponentResource.setProperty("ServiceComponentInfo/service_name", "Service:1");
+    ttComponentResource.setProperty("ServiceComponentInfo/recovery_enabled", "true");
+
+    Resource dnComponentResource = new ResourceImpl(Resource.Type.Component);
+    dnComponentResource.setProperty("ServiceComponentInfo/component_name", "Component:2");
+    dnComponentResource.setProperty("ServiceComponentInfo/cluster_name", "testCluster");
+    dnComponentResource.setProperty("ServiceComponentInfo/service_name", "Service:1");
+    dnComponentResource.setProperty("ServiceComponentInfo/recovery_enabled", "false");
+
+    TreeNode<Resource> componentsTree1 = serviceTree.addChild(null, "components");
+    componentsTree1.setProperty("isCollection", "true");
+
+    componentsTree1.addChild(ttComponentResource, "Component:1");
+    componentsTree1.addChild(dnComponentResource, "Component:2");
+
+    //Scenario 2 :Service with Credential Store disabled, Recovery enabled for Component:1
+    Resource serviceResource2 = new ResourceImpl(Resource.Type.Service);
+    serviceResource2.setProperty("ServiceInfo/service_name","Service:2");
+    serviceResource2.setProperty("ServiceInfo/credential_store_supported","true");
+    serviceResource2.setProperty("ServiceInfo/credential_store_enabled","false");
+    serviceTree = servicesTree.addChild(serviceResource2, "Service:2");
+
+    ttComponentResource = new ResourceImpl(Resource.Type.Component);
+    ttComponentResource.setProperty("ServiceComponentInfo/component_name", "Component:1");
+    ttComponentResource.setProperty("ServiceComponentInfo/cluster_name", "testCluster");
+    ttComponentResource.setProperty("ServiceComponentInfo/service_name", "Service:2");
+    ttComponentResource.setProperty("ServiceComponentInfo/recovery_enabled", "true");
+
+    TreeNode<Resource> componentsTree2 = serviceTree.addChild(null, "components");
+    componentsTree2.setProperty("isCollection", "true");
+
+    componentsTree2.addChild(ttComponentResource, "Component:1");
+
+    //Scenario 3 :Service with both Credential Store and Recovery enabled as false
+    Resource serviceResource3 = new ResourceImpl(Resource.Type.Service);
+    serviceResource3.setProperty("ServiceInfo/service_name","Service:3");
+    serviceResource3.setProperty("ServiceInfo/credential_store_supported","false");
+    serviceResource3.setProperty("ServiceInfo/credential_store_enabled","false");
+    serviceTree = servicesTree.addChild(serviceResource3, "Service:3");
+
+    ttComponentResource = new ResourceImpl(Resource.Type.Component);
+    ttComponentResource.setProperty("ServiceComponentInfo/component_name", "Component:1");
+    ttComponentResource.setProperty("ServiceComponentInfo/cluster_name", "testCluster");
+    ttComponentResource.setProperty("ServiceComponentInfo/service_name", "Service:3");
+    ttComponentResource.setProperty("ServiceComponentInfo/recovery_enabled", "false");
+
+    TreeNode<Resource> componentsTree3 = serviceTree.addChild(null, "components");
+    componentsTree3.setProperty("isCollection", "true");
+
+    componentsTree3.addChild(ttComponentResource, "Component:1");
+
+    //Add empty configurations
+    Resource configurationsResource = new ResourceImpl(Resource.Type.Configuration);
+    clusterTree.addChild(configurationsResource, "configurations");
+
+    //Add empty hosts
+    Resource hostResource = new ResourceImpl(Resource.Type.Host);
+    clusterTree.addChild(hostResource, "hosts");
+
+    return resultTree;
+  }
+
+  @Test
+  public void testGetSettings_instance(){
+    Result result = new ResultImpl(true);
+
+    TreeNode<Resource> resultTree = createResultTreeSettingsObject(result.getResultTree());
+
+    ClusterBlueprintRenderer renderer = new TestBlueprintRenderer(topology);
+    Result blueprintResult = renderer.finalizeResult(result);
+    TreeNode<Resource> blueprintTree = blueprintResult.getResultTree();
+    TreeNode<Resource> blueprintNode = blueprintTree.getChildren().iterator().next();
+    Resource blueprintResource = blueprintNode.getObject();
+    Map<String, Map<String, Object>> propertiesMap = blueprintResource.getPropertiesMap();
+    Map<String,Object> children = propertiesMap.get("");
+
+    //Verify if required information is present in actual result
+    assertTrue(children.containsKey("settings"));
+
+    List<Map<String,Object>> settingValues = (ArrayList)children.get("settings");
+    Boolean isRecoverySettings = false;
+    Boolean isComponentSettings = false;
+    Boolean isServiceSettings = false;
+
+    //Verify actual values
+    for(Map<String,Object> settingProp : settingValues){
+      if(settingProp.containsKey("recovery_settings")){
+        isRecoverySettings = true;
+        HashSet<Map<String,String>> checkPropSize = (HashSet)settingProp.get("recovery_settings");
+        assertEquals(1,checkPropSize.size());
+        assertEquals("true",checkPropSize.iterator().next().get("recovery_enabled"));
+
+      }
+      if(settingProp.containsKey("component_settings")){
+        isComponentSettings = true;
+        HashSet<Map<String,String>> checkPropSize = (HashSet)settingProp.get("component_settings");
+        assertEquals(1,checkPropSize.size());
+        Map<String, String> finalProp = checkPropSize.iterator().next();
+        assertEquals("Component:1",finalProp.get("name"));
+        assertEquals("true",finalProp.get("recovery_enabled"));
+      }
+      if(settingProp.containsKey("service_settings")){
+        isServiceSettings = true;
+        HashSet<Map<String,String>> checkPropSize = (HashSet)settingProp.get("service_settings");
+        assertEquals(2,checkPropSize.size());
+        for(Map<String,String> finalProp : checkPropSize){
+          if(finalProp.containsKey("credential_store_enabled")){
+            assertEquals("Service:1",finalProp.get("name"));
+            assertEquals("true",finalProp.get("recovery_enabled"));
+          }
+          assertFalse(finalProp.get("name").equals("Service:3"));
+        }
+      }
+    }
+    //Verify if required information is present in actual result
+    assertTrue(isRecoverySettings);
+    assertTrue(isComponentSettings);
+    assertTrue(isServiceSettings);
+
+  }
+
   @Test
   public void testFinalizeProperties__instance_noComponentNode() {
     QueryInfo rootQuery = new QueryInfo(new ClusterResourceDefinition(), new HashSet<String>());

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc806659/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index 09d56dc..1c742ef 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@ -47,12 +47,6 @@ import java.util.Set;
 
 import javax.persistence.EntityManager;
 
-import com.google.inject.AbstractModule;
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.configuration.Configuration;
@@ -90,6 +84,13 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
 import com.google.gson.Gson;
 
+import com.google.inject.AbstractModule;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.Provider;
+
 import junit.framework.AssertionFailedError;
 
 /**


[36/50] [abbrv] ambari git commit: AMBARI-19968. Control Log Level for all Hive components for log4j (Madhuvanthi Radhakrishnan via smohanty)

Posted by nc...@apache.org.
AMBARI-19968. Control Log Level for all Hive components for log4j (Madhuvanthi Radhakrishnan via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d6e0b267
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d6e0b267
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d6e0b267

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d6e0b267a26ee5a1ca8a35b2cf682c860653eef8
Parents: 6098d34
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Fri Feb 10 22:54:27 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Sat Feb 11 08:23:46 2017 -0800

----------------------------------------------------------------------
 .../ambari/server/state/theme/TabLayout.java    |  17 ++-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     |   2 +-
 .../package/scripts/hive_interactive.py         |   4 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |   1 +
 .../HIVE/configuration/beeline-log4j2.xml       |   2 +-
 .../services/HIVE/configuration/hive-env.xml    |  29 +++++
 .../HIVE/configuration/hive-exec-log4j.xml      | 114 +++++++++++++++++
 .../HIVE/configuration/hive-exec-log4j2.xml     |   2 +-
 .../services/HIVE/configuration/hive-log4j.xml  | 126 +++++++++++++++++++
 .../services/HIVE/configuration/hive-log4j2.xml |   2 +-
 .../HIVE/configuration/llap-cli-log4j2.xml      |   2 +-
 .../HIVE/configuration/llap-daemon-log4j.xml    |   2 +-
 .../HIVE/configuration/webhcat-log4j.xml        |  83 ++++++++++++
 .../HDP/2.5/services/HIVE/themes/theme.json     |  48 +++++++
 .../stacks/2.0.6/HIVE/test_hive_client.py       |   4 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |   6 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |   4 +-
 .../stacks/2.5/HIVE/test_hive_server_int.py     |  10 +-
 18 files changed, 432 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java b/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java
index 0e38040..4f6cf8f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/theme/TabLayout.java
@@ -19,15 +19,16 @@
 package org.apache.ambari.server.state.theme;
 
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.annotate.JsonProperty;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
 
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 
 @JsonSerialize(include= JsonSerialize.Inclusion.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)
@@ -90,8 +91,12 @@ public class TabLayout {
         if (childSection.isRemoved()) {
           mergedSections.remove(childSection.getName());
         } else {
-          Section parentSection = mergedSections.get(childSection.getName());
-          childSection.mergeWithParent(parentSection);
+          if(mergedSections.containsKey(childSection.getName())) {
+            Section parentSection = mergedSections.get(childSection.getName());
+            childSection.mergeWithParent(parentSection);
+          }else{
+            childSection.mergeWithParent(childSection);
+          }
           mergedSections.put(childSection.getName(), childSection);
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 2a4fd30..b7b04a2 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -375,7 +375,7 @@ def fill_conf_dir(component_conf_dir):
            mode=mode_identified_for_file,
            group=params.user_group,
            owner=params.hive_user,
-           content=params.log4j_exec_props
+           content=InlineTemplate(params.log4j_exec_props)
       )
     elif (os.path.exists("{component_conf_dir}/{log4j_exec_filename}.template")):
       File(format("{component_conf_dir}/{log4j_exec_filename}"),

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
index 10c2e6c..3a70fcf 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
@@ -248,14 +248,14 @@ def hive_interactive(name=None):
          mode=mode_identified,
          group=params.user_group,
          owner=params.hive_user,
-         content=params.hive_exec_log4j2)
+         content=InlineTemplate(params.hive_exec_log4j2))
 
       beeline_log4j2_filename = 'beeline-log4j2.properties'
       File(format("{hive_server_interactive_conf_dir}/{beeline_log4j2_filename}"),
          mode=mode_identified,
          group=params.user_group,
          owner=params.hive_user,
-         content=params.beeline_log4j2)
+         content=InlineTemplate(params.beeline_log4j2))
 
       File(os.path.join(hive_server_interactive_conf_dir, "hadoop-metrics2-hiveserver2.properties"),
            owner=params.hive_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 936b194..a32fbfb 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -497,6 +497,7 @@ webhcat_log_maxfilesize = default("/configurations/webhcat-log4j/webhcat_log_max
 webhcat_log_maxbackupindex = default("/configurations/webhcat-log4j/webhcat_log_maxbackupindex", 20)
 hive_log_maxfilesize = default("/configurations/hive-log4j/hive_log_maxfilesize", 256)
 hive_log_maxbackupindex = default("/configurations/hive-log4j/hive_log_maxbackupindex", 30)
+hive_log_level = default("/configurations/hive-env/hive.log.level", "INFO")
 
 #hive-log4j.properties.template
 if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/beeline-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/beeline-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/beeline-log4j2.xml
index efe9d6e..e814045 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/beeline-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/beeline-log4j2.xml
@@ -46,7 +46,7 @@ name = BeelineLog4j2
 packages = org.apache.hadoop.hive.ql.log
 
 # list of properties
-property.hive.log.level = WARN
+property.hive.log.level = {{hive_log_level}}
 property.hive.root.logger = console
 
 # list of all appenders

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-env.xml
index 14e58bd..ca44bf2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-env.xml
@@ -20,6 +20,35 @@
  */
 -->
 <configuration supports_adding_forbidden="false">
+  <property>
+    <name>hive.log.level</name>
+    <description>Hive Log level to control log4j - Options are INFO, DEBUG, WARN, ERROR</description>
+    <value>INFO</value>
+    <display-name>Hive Log Level</display-name>
+    <value-attributes>
+      <type>value-list</type>
+      <entries>
+        <entry>
+          <value>INFO</value>
+          <label>INFO (Recommended)</label>
+        </entry>
+        <entry>
+          <value>DEBUG</value>
+          <label>DEBUG (Most Verbose)</label>
+        </entry>
+        <entry>
+          <value>WARN</value>
+          <label>WARN</label>
+        </entry>
+        <entry>
+          <value>ERROR</value>
+          <label>ERROR (Least Verbose)</label>
+        </entry>
+      </entries>
+      <selection-cardinality>1</selection-cardinality>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
   <!-- hive-env.sh -->
   <property>
     <name>content</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j.xml
new file mode 100644
index 0000000..4d09756
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j.xml
@@ -0,0 +1,114 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="false">
+  <property>
+    <name>content</name>
+    <display-name>hive-exec-log4j template</display-name>
+    <description>Custom hive-exec-log4j</description>
+    <value>
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+
+hive.log.threshold=ALL
+hive.root.logger={{hive_log_level}},FA
+hive.log.dir=${java.io.tmpdir}/${user.name}
+hive.query.id=hadoop
+hive.log.file=${hive.query.id}.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hive.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=${hive.log.threshold}
+
+#
+# File Appender
+#
+
+log4j.appender.FA=org.apache.log4j.FileAppender
+log4j.appender.FA.File=${hive.log.dir}/${hive.log.file}
+log4j.appender.FA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
+
+
+log4j.category.DataNucleus=ERROR,FA
+log4j.category.Datastore=ERROR,FA
+log4j.category.Datastore.Schema=ERROR,FA
+log4j.category.JPOX.Datastore=ERROR,FA
+log4j.category.JPOX.Plugin=ERROR,FA
+log4j.category.JPOX.MetaData=ERROR,FA
+log4j.category.JPOX.Query=ERROR,FA
+log4j.category.JPOX.General=ERROR,FA
+log4j.category.JPOX.Enhancer=ERROR,FA
+
+
+# Silence useless ZK logs
+log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,FA
+log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,FA
+
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
index 3b935f8..b96a468 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
@@ -46,7 +46,7 @@ name = HiveExecLog4j2
 packages = org.apache.hadoop.hive.ql.log
 
 # list of properties
-property.hive.log.level = INFO
+property.hive.log.level = {{hive_log_level}}
 property.hive.root.logger = FA
 property.hive.query.id = hadoop
 property.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j.xml
new file mode 100644
index 0000000..ca0fa49
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j.xml
@@ -0,0 +1,126 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="false">
+  <property>
+    <name>content</name>
+    <display-name>hive-log4j template</display-name>
+    <description>Custom log4j.properties</description>
+    <value>
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+hive.log.threshold=ALL
+hive.root.logger={{hive_log_level}},DRFA
+hive.log.dir=${java.io.tmpdir}/${user.name}
+hive.log.file=hive.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hive.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=${hive.log.threshold}
+
+#
+# Daily Rolling File Appender
+#
+# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files
+# for different CLI session.
+#
+# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+
+log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex= {{hive_log_maxbackupindex}}
+log4j.appender.DRFA.MaxFileSize = {{hive_log_maxfilesize}}MB
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
+log4j.appender.console.encoding=UTF-8
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
+
+
+log4j.category.DataNucleus=ERROR,DRFA
+log4j.category.Datastore=ERROR,DRFA
+log4j.category.Datastore.Schema=ERROR,DRFA
+log4j.category.JPOX.Datastore=ERROR,DRFA
+log4j.category.JPOX.Plugin=ERROR,DRFA
+log4j.category.JPOX.MetaData=ERROR,DRFA
+log4j.category.JPOX.Query=ERROR,DRFA
+log4j.category.JPOX.General=ERROR,DRFA
+log4j.category.JPOX.Enhancer=ERROR,DRFA
+
+
+# Silence useless ZK logs
+log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
+log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA
+
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
index 01d556f..e6c925d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
@@ -67,7 +67,7 @@ name = HiveLog4j2
 packages = org.apache.hadoop.hive.ql.log
 
 # list of properties
-property.hive.log.level = INFO
+property.hive.log.level = {{hive_log_level}}
 property.hive.root.logger = DRFA
 property.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}
 property.hive.log.file = hive.log

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
index ff93265..a40c200 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
@@ -67,7 +67,7 @@ name = LlapCliLog4j2
 packages = org.apache.hadoop.hive.ql.log
 
 # list of properties
-property.hive.log.level = INFO
+property.hive.log.level = {{hive_log_level}}
 property.hive.root.logger = console
 property.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}
 property.hive.log.file = llap-cli.log

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
index 9fe45b0..a26f7a1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
@@ -74,7 +74,7 @@ name = LlapDaemonLog4j2
 packages = org.apache.hadoop.hive.ql.log
 
 # list of properties
-property.llap.daemon.log.level = INFO
+property.llap.daemon.log.level = {{hive_log_level}}
 property.llap.daemon.root.logger = console
 property.llap.daemon.log.dir = .
 property.llap.daemon.log.file = llapdaemon.log

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/webhcat-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/webhcat-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/webhcat-log4j.xml
new file mode 100644
index 0000000..7295e4c
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/webhcat-log4j.xml
@@ -0,0 +1,83 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="false">
+  <property>
+    <name>content</name>
+    <display-name>webhcat-log4j template</display-name>
+    <description>Custom webhcat-log4j.properties</description>
+    <value>
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Define some default values that can be overridden by system properties
+webhcat.root.logger = {{hive_log_level}}, standard
+webhcat.log.dir = .
+webhcat.log.file = webhcat.log
+
+log4j.rootLogger = ${webhcat.root.logger}
+
+# Logging Threshold
+log4j.threshhold = DEBUG
+
+log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender
+log4j.appender.standard.File = ${webhcat.log.dir}/${webhcat.log.file}
+log4j.appender.standard.MaxFileSize = {{webhcat_log_maxfilesize}}MB
+log4j.appender.standard.MaxBackupIndex = {{webhcat_log_maxbackupindex}}
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern = .yyyy-MM-dd
+
+log4j.appender.DRFA.layout = org.apache.log4j.PatternLayout
+
+log4j.appender.standard.layout = org.apache.log4j.PatternLayout
+log4j.appender.standard.layout.conversionPattern = %-5p | %d{DATE} | %c | %m%n
+
+# Class logging settings
+log4j.logger.com.sun.jersey = DEBUG
+log4j.logger.com.sun.jersey.spi.container.servlet.WebComponent = ERROR
+log4j.logger.org.apache.hadoop = INFO
+log4j.logger.org.apache.hadoop.conf = WARN
+log4j.logger.org.apache.zookeeper = WARN
+log4j.logger.org.eclipse.jetty = INFO
+
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/themes/theme.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/themes/theme.json b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/themes/theme.json
index 9caf51e..befd313 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/themes/theme.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/themes/theme.json
@@ -1,5 +1,43 @@
 {
+
   "configuration" : {
+    "layouts": [
+      {
+        "name": "default",
+        "tabs": [
+          {
+            "name": "settings",
+            "display-name": "Settings",
+            "layout": {
+              "tab-rows": 6,
+              "tab-columns": 3,
+              "sections": [
+                {
+                  "name": "misc-settings",
+                  "display-name": "Miscellaneous Settings",
+                  "row-index": "2",
+                  "column-index": "0",
+                  "row-span": "1",
+                  "column-span": "3",
+                  "section-columns": "3",
+                  "section-rows": "2",
+                  "subsections": [
+                    {
+                      "name": "misc-row1-col1",
+                      "display-name": "Log Level",
+                      "row-index": "0",
+                      "column-index": "0",
+                      "row-span": "1",
+                      "column-span": "1"
+                    }
+                  ]
+                }
+              ]
+            }
+          }
+        ]
+      }
+      ],
     "placement" : {
       "configs" : [
         {
@@ -161,6 +199,10 @@
               }
             }
           ]
+        },
+        {
+          "config": "hive-env/hive.log.level",
+          "subsection-name": "misc-row1-col1"
         }
       ]
     },
@@ -243,6 +285,12 @@
             }
           ]
         }
+      },
+      {
+        "config": "hive-env/hive.log.level",
+        "widget": {
+          "type": "combo"
+        }
       }
     ]
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
index d102b9d..6afc298 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
@@ -61,7 +61,7 @@ class TestHiveClient(RMFTestCase):
         mode = 0644,
     )
     self.assertResourceCalled('File', '/usr/hdp/current/hive-client/conf/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
+        content = InlineTemplate('log4jproperties\nline2'),
         owner = 'hive',
         group = 'hadoop',
         mode = 0644,
@@ -141,7 +141,7 @@ class TestHiveClient(RMFTestCase):
         mode = 0644,
     )
     self.assertResourceCalled('File', '/usr/hdp/current/hive-client/conf/hive-exec-log4j.properties',
-        content = 'log4jproperties\nline2',
+        content = InlineTemplate('log4jproperties\nline2'),
         owner = 'hive',
         group = 'hadoop',
         mode = 0644,

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
index 9c04acc..9486e11 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
@@ -181,7 +181,7 @@ class TestHiveMetastore(RMFTestCase):
                               mode = 0644,
                               )
     self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-exec-log4j.properties',
-                              content = 'log4jproperties\nline2',
+                              content = InlineTemplate('log4jproperties\nline2'),
                               owner = 'hive',
                               group = 'hadoop',
                               mode = 0644,
@@ -304,7 +304,7 @@ class TestHiveMetastore(RMFTestCase):
                               mode = 0644,
                               )
     self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-exec-log4j.properties',
-                              content = 'log4jproperties\nline2',
+                              content = InlineTemplate('log4jproperties\nline2'),
                               owner = 'hive',
                               group = 'hadoop',
                               mode = 0644,
@@ -449,7 +449,7 @@ class TestHiveMetastore(RMFTestCase):
                               mode = 0644)
 
     self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-exec-log4j.properties',
-      content = 'log4jproperties\nline2',
+      content = InlineTemplate('log4jproperties\nline2'),
       mode = 420,
       group = 'hadoop',
       owner = 'hive')

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 50164cf..6592590 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -357,7 +357,7 @@ class TestHiveServer(RMFTestCase):
                               mode=0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-exec-log4j.properties',
-                              content='log4jproperties\nline2',
+                              content=InlineTemplate('log4jproperties\nline2'),
                               owner='hive',
                               group='hadoop',
                               mode=0644,
@@ -565,7 +565,7 @@ class TestHiveServer(RMFTestCase):
                               mode = 0644,
     )
     self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-exec-log4j.properties',
-                              content='log4jproperties\nline2',
+                              content=InlineTemplate('log4jproperties\nline2'),
                               owner='hive',
                               group='hadoop',
                               mode=0644,

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6e0b267/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
index 281dba4..6f017bc 100644
--- a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
+++ b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
@@ -243,7 +243,7 @@ class TestHiveServerInteractive(RMFTestCase):
     self.assertNoMoreResources()
 
   '''
-  #restart should not call slider destroy
+  restart should not call slider destroy
   '''
   @patch("os.path.isfile")
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
@@ -578,13 +578,13 @@ class TestHiveServerInteractive(RMFTestCase):
                                     mode=0600,
                                     )
           self.assertResourceCalled('File', os.path.join(conf_dir, 'hive-exec-log4j2.properties'),
-                                    content='con\ntent',  # Test new line
+                                    content=InlineTemplate('con\ntent'),  # Test new line
                                     owner='hive',
                                     group='hadoop',
                                     mode=0600,
                                     )
           self.assertResourceCalled('File', os.path.join(conf_dir, 'beeline-log4j2.properties'),
-                                    content='con\ntent',  # Test new line
+                                    content=InlineTemplate('con\ntent'),  # Test new line
                                     owner='hive',
                                     group='hadoop',
                                     mode=0600,
@@ -643,13 +643,13 @@ class TestHiveServerInteractive(RMFTestCase):
                                     mode=0644,
           )
           self.assertResourceCalled('File', os.path.join(conf_dir, 'hive-exec-log4j2.properties'),
-                                    content='con\ntent',  # Test new line
+                                    content=InlineTemplate('con\ntent'),  # Test new line
                                     owner='hive',
                                     group='hadoop',
                                     mode=0644,
           )
           self.assertResourceCalled('File', os.path.join(conf_dir, 'beeline-log4j2.properties'),
-                                    content='con\ntent',  # Test new line
+                                    content=InlineTemplate('con\ntent'),  # Test new line
                                     owner='hive',
                                     group='hadoop',
                                     mode=0644,