Posted to commits@ambari.apache.org by nc...@apache.org on 2016/09/12 16:59:27 UTC

[01/10] ambari git commit: AMBARI-18322. [Grafana] Add Apache NiFi - Hosts dashboard (prajwal)

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-patch-upgrade f9abdafaf -> e4cb41e0a


AMBARI-18322. [Grafana] Add Apache NiFi - Hosts dashboard (prajwal)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2ce9ab93
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2ce9ab93
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2ce9ab93

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2ce9ab9308d31ed4f4d6df8dc7609d216f8473f2
Parents: f9abdaf
Author: Prajwal Rao <pr...@gmail.com>
Authored: Fri Sep 9 14:14:15 2016 -0700
Committer: Prajwal Rao <pr...@gmail.com>
Committed: Fri Sep 9 14:14:15 2016 -0700

----------------------------------------------------------------------
 .../HDF/grafana-nifi-hosts.json                 | 1127 ++++++++++++++++++
 1 file changed, 1127 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2ce9ab93/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-hosts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-hosts.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-hosts.json
new file mode 100644
index 0000000..94aa285
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-hosts.json
@@ -0,0 +1,1127 @@
+{
+  "id": null,
+  "title": "NiFi - Hosts",
+  "originalTitle": "NiFi - Hosts",
+  "tags": [
+    "nifi"
+  ],
+  "style": "dark",
+  "timezone": "browser",
+  "editable": true,
+  "hideControls": false,
+  "sharedCrosshair": false,
+  "rows": [
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "content": "<h4 align=\"center\">NiFi Metrics per host for the cluster. Click on each row title to expand on demand to look at various metrics. </h4>\n<h6 style=\"color:red;\" align=\"center\">This dashboard is managed by Ambari.  You may lose any changes made to this dashboard.  If you want to customize, make your own copy.</h6>",
+          "editable": true,
+          "error": false,
+          "id": 13,
+          "isNew": true,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "style": {},
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "title": "New row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "200px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 10,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "jvm.heap_usage",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM Heap Usage",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "percentunit",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 11,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "jvm.file_descriptor_usage",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM File Descriptor Usage",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "percentunit",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 12,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "jvm.uptime",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM Uptime",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "s",
+            "short"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "JVM Info"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "200px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 7,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "ActiveThreads",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Active Threads",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 8,
+          "isNew": true,
+          "leftYAxisLabel": "",
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "jvm.thread_count",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Thread Count",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 9,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "jvm.daemon_thread_count",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Daemon Thread Count",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Thread Info"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "200px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 1,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "FlowFilesReceivedLast5Minutes",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Flow Files Received",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": 0,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 2,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "FlowFilesSentLast5Minutes",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Flow Files Sent",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 5,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "FlowFilesQueued",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Flow Files Queued",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Flow File Info"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "200px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 3,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "BytesReceivedLast5Minutes",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Bytes Received",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 4,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "BytesSentLast5Minutes",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Bytes Sent",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 6,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": false,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "app": "nifi",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "BytesQueued",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Bytes Queued",
+          "tooltip": {
+            "shared": false,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "short"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Byte Info"
+    }
+  ],
+  "time": {
+    "from": "now-6h",
+    "to": "now"
+  },
+  "timepicker": {
+    "now": true,
+    "refresh_intervals": [
+      "5s",
+      "10s",
+      "30s",
+      "1m",
+      "5m",
+      "15m",
+      "30m",
+      "1h",
+      "2h",
+      "1d"
+    ],
+    "time_options": [
+      "5m",
+      "15m",
+      "1h",
+      "6h",
+      "12h",
+      "24h",
+      "2d",
+      "7d",
+      "30d"
+    ]
+  },
+  "templating": {
+    "list": [
+      {
+        "allFormat": "glob",
+        "current": {
+          "text": "nifi",
+          "value": "nifi"
+        },
+        "datasource": null,
+        "hideLabel": true,
+        "includeAll": false,
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "component",
+        "options": [
+          {
+            "selected": true,
+            "text": "nifi",
+            "value": "nifi"
+          }
+        ],
+        "query": "nifi",
+        "refresh": false,
+        "type": "custom"
+      },
+      {
+        "allFormat": "glob",
+        "current": {
+          "text": "All",
+          "value": ""
+        },
+        "datasource": null,
+        "hideLabel": true,
+        "includeAll": true,
+        "multi": true,
+        "multiFormat": "glob",
+        "name": "hosts",
+        "options": [
+          {
+            "text": "All",
+            "value": "",
+            "selected": true
+          }
+        ],
+        "query": "hosts",
+        "refresh": true,
+        "regex": "",
+        "type": "query",
+        "useTags": false
+      },
+      {
+        "allFormat": "glob",
+        "current": {
+          "text": "top",
+          "value": "top"
+        },
+        "datasource": null,
+        "includeAll": false,
+        "label": "show",
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "orientation",
+        "options": [
+          {
+            "selected": true,
+            "text": "top",
+            "value": "top"
+          },
+          {
+            "selected": false,
+            "text": "bottom",
+            "value": "bottom"
+          }
+        ],
+        "query": "top,bottom",
+        "refresh": false,
+        "type": "custom"
+      },
+      {
+        "allFormat": "glob",
+        "current": {
+          "text": "20",
+          "value": "20"
+        },
+        "datasource": null,
+        "hideLabel": true,
+        "includeAll": false,
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "instances",
+        "options": [
+          {
+            "selected": true,
+            "text": "20",
+            "value": "20"
+          },
+          {
+            "selected": false,
+            "text": "1",
+            "value": "1"
+          },
+          {
+            "selected": false,
+            "text": "5",
+            "value": "5"
+          },
+          {
+            "selected": false,
+            "text": "10",
+            "value": "10"
+          },
+          {
+            "selected": false,
+            "text": "15",
+            "value": "15"
+          },
+          {
+            "selected": false,
+            "text": "50",
+            "value": "50"
+          },
+          {
+            "selected": false,
+            "text": "100",
+            "value": "100"
+          }
+        ],
+        "query": "20,1,5,10,15,50,100",
+        "refresh": false,
+        "type": "custom"
+      },
+      {
+        "allFormat": "glob",
+        "current": {
+          "text": "avg",
+          "value": "avg"
+        },
+        "datasource": null,
+        "includeAll": false,
+        "label": "aggregator",
+        "multi": false,
+        "multiFormat": "glob",
+        "name": "topagg",
+        "options": [
+          {
+            "selected": true,
+            "text": "avg",
+            "value": "avg"
+          },
+          {
+            "selected": false,
+            "text": "max",
+            "value": "max"
+          },
+          {
+            "selected": false,
+            "text": "sum",
+            "value": "sum"
+          }
+        ],
+        "query": "avg,max,sum",
+        "refresh": false,
+        "type": "custom"
+      }
+    ]
+  },
+  "annotations": {
+    "list": []
+  },
+  "schemaVersion": 8,
+  "version": 8,
+  "links": []
+}
\ No newline at end of file
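
For orientation: the 1127-line dashboard above follows Grafana's rows, panels, and targets layout, where every target names an Ambari Metrics Service metric emitted by the "nifi" app (jvm.heap_usage, FlowFilesQueued, BytesSentLast5Minutes, and so on). A minimal Python sketch that walks that structure and prints each panel's metrics; illustrative only, using the repo-relative path added by this commit:

import json

# Path of the file added by this commit (repo-relative); adjust as needed.
PATH = ("ambari-server/src/main/resources/common-services/AMBARI_METRICS/"
        "0.1.0/package/files/grafana-dashboards/HDF/grafana-nifi-hosts.json")

with open(PATH) as f:
    dashboard = json.load(f)

for row in dashboard["rows"]:
    for panel in row["panels"]:
        # Text panels carry HTML content and no "targets" entry.
        metrics = [t["metric"] for t in panel.get("targets", [])]
        if metrics:
            print("%-30s %s" % (panel["title"], ", ".join(metrics)))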


[02/10] ambari git commit: AMBARI-18331 - JMX metric retrieval method may unnecessarily refresh metrics at a high rate (part2) (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-18331 - JMX metric retrieval method may unnecessarily refresh metrics at a high rate (part2) (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2547d8f2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2547d8f2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2547d8f2

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2547d8f23356ac8aba168597f38c107b040d2bda
Parents: 2ce9ab9
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Sep 9 17:17:15 2016 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Sep 9 17:20:14 2016 -0400

----------------------------------------------------------------------
 .../ambari/server/state/services/MetricsRetrievalService.java      | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2547d8f2/ambari-server/src/main/java/org/apache/ambari/server/state/services/MetricsRetrievalService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/services/MetricsRetrievalService.java b/ambari-server/src/main/java/org/apache/ambari/server/state/services/MetricsRetrievalService.java
index 629f6ab..79e0e25 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/services/MetricsRetrievalService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/services/MetricsRetrievalService.java
@@ -198,7 +198,7 @@ public class MetricsRetrievalService extends AbstractService {
         TimeUnit.MINUTES).build();
 
     // enable the TTL cache if configured; otherwise leave it as null
-    int ttlSeconds = m_configuration.getMetricCacheTTLSeconds();
+    int ttlSeconds = m_configuration.getMetricsServiceRequestTTL();
     boolean ttlCacheEnabled = m_configuration.isMetricsServiceRequestTTLCacheEnabled();
     if (ttlCacheEnabled) {
       m_ttlUrlCache = CacheBuilder.newBuilder().expireAfterWrite(ttlSeconds,
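
The one-line fix swaps in the correct configuration getter, getMetricsServiceRequestTTL(), so the Guava cache built with expireAfterWrite() is sized from the metrics-service request TTL property rather than the metric cache TTL. The cache's job is simple: remember each metrics URL for ttlSeconds after it is requested, and suppress re-requests for URLs still inside the window. A small Python sketch of that idea (illustrative names, not Ambari's):

import time

class TTLUrlCache(object):
    """Remember URLs for ttl_seconds; a cached URL was fetched recently."""

    def __init__(self, ttl_seconds):
        self.ttl = ttl_seconds
        self._cached_at = {}  # url -> time.time() when the URL was cached

    def should_fetch(self, url):
        cached_at = self._cached_at.get(url)
        if cached_at is not None and time.time() - cached_at < self.ttl:
            return False  # within the TTL window: skip the refresh
        self._cached_at[url] = time.time()
        return True

cache = TTLUrlCache(ttl_seconds=30)
assert cache.should_fetch("http://dn1:50075/jmx")      # first request goes out
assert not cache.should_fetch("http://dn1:50075/jmx")  # suppressed for 30s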


[07/10] ambari git commit: AMBARI-18260. Journal node restart failing on RU from dergM10 to erie on Wire Encrypted cluster (aonishuk)

Posted by nc...@apache.org.
AMBARI-18260. Journal node restart failing on RU from dergM10 to erie on Wire Encrypted cluster (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2205f9cb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2205f9cb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2205f9cb

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2205f9cbb6bec08f95735310c743e17267c3abec
Parents: 1a27910
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 12 15:10:22 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 12 15:10:22 2016 +0300

----------------------------------------------------------------------
 .../HDFS/2.1.0.2.0/package/scripts/utils.py            | 13 +++++++++++++
 1 file changed, 13 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2205f9cb/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
index f6987b3..966efa2 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
@@ -37,6 +37,19 @@ from resource_management.libraries.functions.show_logs import show_logs
 
 from zkfc_slave import ZkfcSlaveDefault
 
+import ssl
+from functools import wraps
+
+# patch ssl module to fix SSLv3 communication bug
+# for more info see http://stackoverflow.com/questions/9835506/urllib-urlopen-works-on-sslv3-urls-with-python-2-6-6-on-1-machine-but-not-wit
+def sslwrap(func):
+    @wraps(func)
+    def bar(*args, **kw):
+        kw['ssl_version'] = ssl.PROTOCOL_TLSv1
+        return func(*args, **kw)
+    return bar
+ssl.wrap_socket = sslwrap(ssl.wrap_socket)
+
 def safe_zkfc_op(action, env):
   """
   Idempotent operation on the zkfc process to either start or stop it.
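
The patch above is the classic functools.wraps wrapper: intercept every call to ssl.wrap_socket and force ssl_version=ssl.PROTOCOL_TLSv1 before delegating, working around the SSLv3 handshake issue referenced in the comment's Stack Overflow link. The same pattern in isolation, runnable on any Python version (ssl.wrap_socket itself is Python 2 era API and was removed in Python 3.12, so the diff's exact patch applies to the Python 2 runtime the scripts run under):

from functools import wraps

def force_kwargs(func, **forced):
    """Return func wrapped so the given keyword arguments always apply."""
    @wraps(func)  # preserve func's __name__ and docstring on the wrapper
    def wrapper(*args, **kw):
        kw.update(forced)
        return func(*args, **kw)
    return wrapper

def connect(host, timeout=10, ssl_version=None):
    return (host, timeout, ssl_version)

connect = force_kwargs(connect, ssl_version="TLSv1")  # patch in place
print(connect("example.com"))  # -> ('example.com', 10, 'TLSv1')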


[08/10] ambari git commit: AMBARI-18050 - Upgrade pre-req check code needs to be decoupled from CheckDescription class

Posted by nc...@apache.org.
AMBARI-18050 - Upgrade pre-req check code needs to be decoupled from CheckDescription class


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2961c480
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2961c480
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2961c480

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2961c480e05ff179abe96eee4e699662ed820c5c
Parents: 2205f9c
Author: Tim Thorpe <tt...@apache.org>
Authored: Mon Sep 12 05:35:48 2016 -0700
Committer: Tim Thorpe <tt...@apache.org>
Committed: Mon Sep 12 05:35:48 2016 -0700

----------------------------------------------------------------------
 .../ambari/server/checks/CheckDescription.java  | 458 ++++++++++---------
 1 file changed, 250 insertions(+), 208 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2961c480/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
index aa8e20c..498481d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
@@ -27,262 +27,304 @@ import com.google.common.collect.ImmutableMap;
  * Enum that wraps the various type, text and failure messages for the checks
  * done for Stack Upgrades.
  */
-public enum CheckDescription {
-
-  CLIENT_RETRY(PrereqCheckType.SERVICE,
-      "Client Retry Properties",
-      new ImmutableMap.Builder<String, String>()
-        .put(ClientRetryPropertyCheck.HDFS_CLIENT_RETRY_DISABLED_KEY,
-            "The hdfs-site.xml property dfs.client.retry.policy.enabled should be set to \"false\" to failover quickly.")
-        .put(ClientRetryPropertyCheck.HIVE_CLIENT_RETRY_MISSING_KEY,
+public class CheckDescription {
+
+  public static CheckDescription CLIENT_RETRY = new CheckDescription("CLIENT_RETRY",
+    PrereqCheckType.SERVICE,
+    "Client Retry Properties",
+    new ImmutableMap.Builder<String, String>()
+      .put(ClientRetryPropertyCheck.HDFS_CLIENT_RETRY_DISABLED_KEY,
+          "The hdfs-site.xml property dfs.client.retry.policy.enabled should be set to \"false\" to failover quickly.")
+      .put(ClientRetryPropertyCheck.HIVE_CLIENT_RETRY_MISSING_KEY,
           "The hive-site.xml property hive.metastore.failure.retries should be set to a positive value.")
-        .put(ClientRetryPropertyCheck.OOZIE_CLIENT_RETRY_MISSING_KEY,
-          "The oozie-env.sh script must contain a retry count such as export OOZIE_CLIENT_OPTS=\"${OOZIE_CLIENT_OPTS} -Doozie.connection.retry.count=5\"").build()),
-
-  HOSTS_HEARTBEAT(PrereqCheckType.HOST,
-      "All hosts must be communicating with Ambari. Hosts which are not reachable should be placed in Maintenance Mode.",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-            "There are hosts which are not communicating with Ambari.").build()),
-
-  HEALTH(PrereqCheckType.CLUSTER,
-      "Cluster Health",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-            "The following issues have been detected on this cluster and should be addressed before upgrading: %s").build()),
-
-  SERVICE_CHECK(PrereqCheckType.SERVICE,
-      "Last Service Check should be more recent than the last configuration change for the given service",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-            "The following service configurations have been updated and their Service Checks should be run again: %s").build()),
-
-  HOSTS_MAINTENANCE_MODE(PrereqCheckType.HOST,
-      "Hosts in Maintenance Mode will be excluded from the upgrade.",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-            "There are hosts in Maintenance Mode which excludes them from being upgraded.").build()),
-
-  HOSTS_MASTER_MAINTENANCE(PrereqCheckType.HOST,
-      "Hosts in Maintenance Mode must not have any master components",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
+      .put(ClientRetryPropertyCheck.OOZIE_CLIENT_RETRY_MISSING_KEY,
+          "The oozie-env.sh script must contain a retry count such as export OOZIE_CLIENT_OPTS=\"${OOZIE_CLIENT_OPTS} -Doozie.connection.retry.count=5\"").build());
+
+  public static CheckDescription HOSTS_HEARTBEAT = new CheckDescription("HOSTS_HEARTBEAT",
+    PrereqCheckType.HOST,
+    "All hosts must be communicating with Ambari. Hosts which are not reachable should be placed in Maintenance Mode.",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "There are hosts which are not communicating with Ambari.").build());
+
+  public static CheckDescription HEALTH = new CheckDescription("HEALTH",
+    PrereqCheckType.CLUSTER,
+    "Cluster Health",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "The following issues have been detected on this cluster and should be addressed before upgrading: %s").build());
+
+  public static CheckDescription SERVICE_CHECK = new CheckDescription("SERVICE_CHECK",
+    PrereqCheckType.SERVICE,
+    "Last Service Check should be more recent than the last configuration change for the given service",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "The following service configurations have been updated and their Service Checks should be run again: %s").build());
+
+  public static CheckDescription HOSTS_MAINTENANCE_MODE = new CheckDescription("HOSTS_MAINTENANCE_MODE",
+    PrereqCheckType.HOST,
+    "Hosts in Maintenance Mode will be excluded from the upgrade.",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "There are hosts in Maintenance Mode which excludes them from being upgraded.").build());
+
+  public static CheckDescription HOSTS_MASTER_MAINTENANCE = new CheckDescription("HOSTS_MASTER_MAINTENANCE",
+    PrereqCheckType.HOST,
+    "Hosts in Maintenance Mode must not have any master components",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
           "The following hosts must not be in in Maintenance Mode since they host Master components: {{fails}}.")
-        .put(HostsMasterMaintenanceCheck.KEY_NO_UPGRADE_NAME,
+      .put(HostsMasterMaintenanceCheck.KEY_NO_UPGRADE_NAME,
           "Could not find suitable upgrade pack for %s %s to version {{version}}.")
-        .put(HostsMasterMaintenanceCheck.KEY_NO_UPGRADE_PACK,
-          "Could not find upgrade pack named %s.").build()),
+      .put(HostsMasterMaintenanceCheck.KEY_NO_UPGRADE_PACK,
+          "Could not find upgrade pack named %s.").build());
 
-  HOSTS_REPOSITORY_VERSION(PrereqCheckType.HOST,
-      "All hosts should have target version installed",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
+  public static CheckDescription HOSTS_REPOSITORY_VERSION = new CheckDescription("HOSTS_REPOSITORY_VERSION",
+    PrereqCheckType.HOST,
+    "All hosts should have target version installed",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
           "The following hosts must have version {{version}} installed: {{fails}}.")
-        .put(HostsRepositoryVersionCheck.KEY_NO_REPO_VERSION,
-          "Repository version {{version}} does not exist.").build()),
-
-  SECONDARY_NAMENODE_MUST_BE_DELETED(PrereqCheckType.HOST,
-      "The SNameNode component must be deleted from all hosts",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT, "The SNameNode component must be deleted from host: %s.").build()),
-
-  STORM_REST_API_MUST_BE_DELETED(PrereqCheckType.SERVICE,
-      "The STORM_REST_API component will no longer be available and must be deleted from the cluster before upgrading. The same functionality is now provided by STORM_UI_SERVER. First, stop the entire Storm service. Next, delete STORM_REST_API using the API, e.g., curl -u $user:$password -X DELETE -H 'X-Requested-By:admin' http://$server:8080/api/v1/clusters/$name/services/STORM/components/STORM_REST_API . Finally, start Storm service.",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT, "The following component must be deleted from the cluster: {{fails}}.").build()),
-
-  SERVICES_HIVE_MULTIPLE_METASTORES(PrereqCheckType.SERVICE,
-      "Hive Metastore Availability",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-          "Multiple Hive Metastore instances are recommended for Rolling Upgrade. This ensures that there is at least one Metastore running during the upgrade process.").build()),
-
-  SERVICES_MAINTENANCE_MODE(PrereqCheckType.SERVICE,
-      "No services can be in Maintenance Mode",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-          "The following Services must not be in Maintenance Mode: {{fails}}.").build()),
-
-  SERVICES_MR_DISTRIBUTED_CACHE(PrereqCheckType.SERVICE,
-      "MapReduce should reference Hadoop libraries from the distributed cache in HDFS",
-      new ImmutableMap.Builder<String, String>()
-        .put(ServicesMapReduceDistributedCacheCheck.KEY_APP_CLASSPATH,
+      .put(HostsRepositoryVersionCheck.KEY_NO_REPO_VERSION,
+          "Repository version {{version}} does not exist.").build());
+
+  public static CheckDescription SECONDARY_NAMENODE_MUST_BE_DELETED = new CheckDescription("SECONDARY_NAMENODE_MUST_BE_DELETED",
+    PrereqCheckType.HOST,
+    "The SNameNode component must be deleted from all hosts",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT, "The SNameNode component must be deleted from host: %s.").build());
+
+  public static CheckDescription STORM_REST_API_MUST_BE_DELETED = new CheckDescription("STORM_REST_API_MUST_BE_DELETED",
+    PrereqCheckType.SERVICE,
+    "The STORM_REST_API component will no longer be available and must be deleted from the cluster before upgrading. The same functionality is now provided by STORM_UI_SERVER. First, stop the entire Storm service. Next, delete STORM_REST_API using the API, e.g., curl -u $user:$password -X DELETE -H 'X-Requested-By:admin' http://$server:8080/api/v1/clusters/$name/services/STORM/components/STORM_REST_API . Finally, start Storm service.",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT, "The following component must be deleted from the cluster: {{fails}}.").build());
+
+  public static CheckDescription SERVICES_HIVE_MULTIPLE_METASTORES = new CheckDescription("SERVICES_HIVE_MULTIPLE_METASTORES",
+    PrereqCheckType.SERVICE,
+    "Hive Metastore Availability",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "Multiple Hive Metastore instances are recommended for Rolling Upgrade. This ensures that there is at least one Metastore running during the upgrade process.").build());
+
+  public static CheckDescription SERVICES_MAINTENANCE_MODE = new CheckDescription("SERVICES_MAINTENANCE_MODE",
+    PrereqCheckType.SERVICE,
+    "No services can be in Maintenance Mode",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "The following Services must not be in Maintenance Mode: {{fails}}.").build());
+
+  public static CheckDescription SERVICES_MR_DISTRIBUTED_CACHE = new CheckDescription("SERVICES_MR_DISTRIBUTED_CACHE",
+    PrereqCheckType.SERVICE,
+    "MapReduce should reference Hadoop libraries from the distributed cache in HDFS",
+    new ImmutableMap.Builder<String, String>()
+      .put(ServicesMapReduceDistributedCacheCheck.KEY_APP_CLASSPATH,
           "The mapred-site.xml property mapreduce.application.classpath should be set.")
-        .put(ServicesMapReduceDistributedCacheCheck.KEY_FRAMEWORK_PATH,
+      .put(ServicesMapReduceDistributedCacheCheck.KEY_FRAMEWORK_PATH,
           "The mapred-site.xml property mapreduce.application.framework.path should be set.")
-        .put(ServicesMapReduceDistributedCacheCheck.KEY_NOT_DFS,
-          "The mapred-site.xml property mapreduce.application.framework.path or the core-site.xml property fs.defaultFS should point to *dfs:/ url.").build()),
-
-  SERVICES_NAMENODE_HA(PrereqCheckType.SERVICE,
-      "NameNode High Availability must be enabled",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-          "NameNode High Availability is not enabled. Verify that dfs.internal.nameservices property is present in hdfs-site.xml.").build()),
-
-  SERVICES_NAMENODE_TRUNCATE(PrereqCheckType.SERVICE,
-      "NameNode Truncate must not be allowed",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-          "NameNode Truncate is allowed. Verify that dfs.allow.truncate is set to 'false' in hdfs-site.xml.").build()),
-
-  SERVICES_TEZ_DISTRIBUTED_CACHE(PrereqCheckType.SERVICE,
-      "Tez should reference Hadoop libraries from the distributed cache in HDFS",
-      new ImmutableMap.Builder<String, String>()
-        .put(ServicesTezDistributedCacheCheck.KEY_LIB_URI_MISSING,
+      .put(ServicesMapReduceDistributedCacheCheck.KEY_NOT_DFS,
+          "The mapred-site.xml property mapreduce.application.framework.path or the core-site.xml property fs.defaultFS should point to *dfs:/ url.").build());
+
+  public static CheckDescription SERVICES_NAMENODE_HA = new CheckDescription("SERVICES_NAMENODE_HA",
+    PrereqCheckType.SERVICE,
+    "NameNode High Availability must be enabled",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "NameNode High Availability is not enabled. Verify that dfs.internal.nameservices property is present in hdfs-site.xml.").build());
+
+  public static CheckDescription SERVICES_NAMENODE_TRUNCATE = new CheckDescription("SERVICES_NAMENODE_TRUNCATE",
+    PrereqCheckType.SERVICE,
+    "NameNode Truncate must not be allowed",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "NameNode Truncate is allowed. Verify that dfs.allow.truncate is set to 'false' in hdfs-site.xml.").build());
+
+  public static CheckDescription SERVICES_TEZ_DISTRIBUTED_CACHE = new CheckDescription("SERVICES_TEZ_DISTRIBUTED_CACHE",
+    PrereqCheckType.SERVICE,
+    "Tez should reference Hadoop libraries from the distributed cache in HDFS",
+    new ImmutableMap.Builder<String, String>()
+      .put(ServicesTezDistributedCacheCheck.KEY_LIB_URI_MISSING,
           "The tez-site.xml property tez.lib.uris should be set.")
-        .put(ServicesTezDistributedCacheCheck.KEY_USE_HADOOP_LIBS,
+      .put(ServicesTezDistributedCacheCheck.KEY_USE_HADOOP_LIBS,
           "The tez-site.xml property tez.use.cluster-hadoop-libs should be set.")
-        .put(ServicesTezDistributedCacheCheck.KEY_LIB_NOT_DFS,
+      .put(ServicesTezDistributedCacheCheck.KEY_LIB_NOT_DFS,
           "The tez-site.xml property tez.lib.uris or the core-site.xml property fs.defaultFS should point to *dfs:/ url.")
-        .put(ServicesTezDistributedCacheCheck.KEY_LIB_NOT_TARGZ,
+      .put(ServicesTezDistributedCacheCheck.KEY_LIB_NOT_TARGZ,
           "The tez-site.xml property tez.lib.uris should point to tar.gz file.")
-        .put(ServicesTezDistributedCacheCheck.KEY_USE_HADOOP_LIBS_FALSE,
-          "The tez-site.xml property tez.use.cluster.hadoop-libs should be set to false.").build()),
-
-  SERVICES_UP(PrereqCheckType.SERVICE,
-      "All services must be started",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-          "The following Services must be started: {{fails}}. Try to do a Stop & Start in case they were started outside of Ambari.").build()),
-
-  COMPONENTS_INSTALLATION(PrereqCheckType.SERVICE,
-      "All service components must be installed",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-            "The following Services must be reinstalled: {{fails}}. Try to reinstall the service components in INSTALL_FAILED state.").build()),
-
-  PREVIOUS_UPGRADE_COMPLETED(PrereqCheckType.CLUSTER,
-      "A previous upgrade did not complete.",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-            "The last upgrade attempt did not complete. {{fails}}").build()),
-
-  INSTALL_PACKAGES_CHECK(PrereqCheckType.CLUSTER,
-      "Install packages must be re-run",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-            "Re-run Install Packages before starting upgrade").build()),
-
-  SERVICES_YARN_WP(PrereqCheckType.SERVICE,
-      "YARN work preserving restart should be enabled",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-          "YARN should have work preserving restart enabled. The yarn-site.xml property yarn.resourcemanager.work-preserving-recovery.enabled property should be set to true.").build()),
-
-  SERVICES_YARN_RM_HA(PrereqCheckType.SERVICE,
-      "YARN ResourceManager High Availability is not enabled.",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-          "YARN ResourceManager HA should be enabled to prevent a disruption in service during the upgrade").build()),
-
-  SERVICES_YARN_TIMELINE_ST(PrereqCheckType.SERVICE,
-      "YARN Timeline state preserving restart should be enabled",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-          "YARN should have state preserving restart enabled for the Timeline server. The yarn-site.xml property yarn.timeline-service.recovery.enabled should be set to true.").build()),
-
-  SERVICES_MR2_JOBHISTORY_ST(PrereqCheckType.SERVICE,
-      "MapReduce2 JobHistory recovery should be enabled",
-      new ImmutableMap.Builder<String, String>()
-        .put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_ENABLE_KEY,
+      .put(ServicesTezDistributedCacheCheck.KEY_USE_HADOOP_LIBS_FALSE,
+          "The tez-site.xml property tez.use.cluster.hadoop-libs should be set to false.").build());
+
+  public static CheckDescription SERVICES_UP = new CheckDescription("SERVICES_UP",
+    PrereqCheckType.SERVICE,
+    "All services must be started",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "The following Services must be started: {{fails}}. Try to do a Stop & Start in case they were started outside of Ambari.").build());
+
+  public static CheckDescription COMPONENTS_INSTALLATION = new CheckDescription("COMPONENTS_INSTALLATION",
+    PrereqCheckType.SERVICE,
+    "All service components must be installed",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "The following Services must be reinstalled: {{fails}}. Try to reinstall the service components in INSTALL_FAILED state.").build());
+
+  public static CheckDescription PREVIOUS_UPGRADE_COMPLETED = new CheckDescription("PREVIOUS_UPGRADE_COMPLETED",
+    PrereqCheckType.CLUSTER,
+    "A previous upgrade did not complete.",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "The last upgrade attempt did not complete. {{fails}}").build());
+
+  public static CheckDescription INSTALL_PACKAGES_CHECK = new CheckDescription("INSTALL_PACKAGES_CHECK",
+    PrereqCheckType.CLUSTER,
+    "Install packages must be re-run",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "Re-run Install Packages before starting upgrade").build());
+
+  public static CheckDescription SERVICES_YARN_WP = new CheckDescription("SERVICES_YARN_WP",
+    PrereqCheckType.SERVICE,
+    "YARN work preserving restart should be enabled",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "YARN should have work preserving restart enabled. The yarn-site.xml property yarn.resourcemanager.work-preserving-recovery.enabled property should be set to true.").build());
+
+  public static CheckDescription SERVICES_YARN_RM_HA = new CheckDescription("SERVICES_YARN_RM_HA",
+    PrereqCheckType.SERVICE,
+    "YARN ResourceManager High Availability is not enabled.",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "YARN ResourceManager HA should be enabled to prevent a disruption in service during the upgrade").build());
+
+  public static CheckDescription SERVICES_YARN_TIMELINE_ST = new CheckDescription("SERVICES_YARN_TIMELINE_ST",
+    PrereqCheckType.SERVICE,
+    "YARN Timeline state preserving restart should be enabled",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "YARN should have state preserving restart enabled for the Timeline server. The yarn-site.xml property yarn.timeline-service.recovery.enabled should be set to true.").build());
+
+  public static CheckDescription SERVICES_MR2_JOBHISTORY_ST = new CheckDescription("SERVICES_MR2_JOBHISTORY_ST",
+    PrereqCheckType.SERVICE,
+    "MapReduce2 JobHistory recovery should be enabled",
+    new ImmutableMap.Builder<String, String>()
+      .put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_ENABLE_KEY,
           "MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.enable should be set to true.")
-        .put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_STORE_KEY,
+      .put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_STORE_KEY,
           "MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.store.class should be set to org.apache.hadoop.mapreduce.v2.hs.HistoryServerLeveldbStateStoreService.")
-        .put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_STORE_LEVELDB_PATH_KEY,
-          "MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.store.leveldb.path should be set. Please note that \"mapreduce.jobhistory.recovery.store.leveldb.path\" should be on a mount with ~3 GB of free space.").build()),
+      .put(MapReduce2JobHistoryStatePreservingCheck.MAPREDUCE2_JOBHISTORY_RECOVERY_STORE_LEVELDB_PATH_KEY,
+          "MapReduce2 should have recovery enabled for the JobHistory server. The mapred-site.xml property mapreduce.jobhistory.recovery.store.leveldb.path should be set. Please note that \"mapreduce.jobhistory.recovery.store.leveldb.path\" should be on a mount with ~3 GB of free space.").build());
 
-  SERVICES_HIVE_DYNAMIC_SERVICE_DISCOVERY(PrereqCheckType.SERVICE,
-      "Hive Dynamic Service Discovery",
-      new ImmutableMap.Builder<String, String>()
-        .put(HiveDynamicServiceDiscoveryCheck.HIVE_DYNAMIC_SERVICE_DISCOVERY_ENABLED_KEY,
+  public static CheckDescription SERVICES_HIVE_DYNAMIC_SERVICE_DISCOVERY = new CheckDescription("SERVICES_HIVE_DYNAMIC_SERVICE_DISCOVERY",
+    PrereqCheckType.SERVICE,
+    "Hive Dynamic Service Discovery",
+    new ImmutableMap.Builder<String, String>()
+      .put(HiveDynamicServiceDiscoveryCheck.HIVE_DYNAMIC_SERVICE_DISCOVERY_ENABLED_KEY,
           "The hive-site.xml property hive.server2.support.dynamic.service.discovery should be set to true.")
-        .put(HiveDynamicServiceDiscoveryCheck.HIVE_DYNAMIC_SERVICE_ZK_QUORUM_KEY,
+      .put(HiveDynamicServiceDiscoveryCheck.HIVE_DYNAMIC_SERVICE_ZK_QUORUM_KEY,
           "The hive-site.xml property hive.zookeeper.quorum should be set to a comma-separate list of ZooKeeper hosts:port pairs.")
-        .put(HiveDynamicServiceDiscoveryCheck.HIVE_DYNAMIC_SERVICE_ZK_NAMESPACE_KEY,
-          "The hive-site.xml property hive.server2.zookeeper.namespace should be set to the value for the root namespace on ZooKeeper.").build()),
+      .put(HiveDynamicServiceDiscoveryCheck.HIVE_DYNAMIC_SERVICE_ZK_NAMESPACE_KEY,
+          "The hive-site.xml property hive.server2.zookeeper.namespace should be set to the value for the root namespace on ZooKeeper.").build());
 
-  CONFIG_MERGE(PrereqCheckType.CLUSTER,
-      "Configuration Merge Check",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-          "The following config types will have values overwritten: %s").build()),
+  public static CheckDescription CONFIG_MERGE = new CheckDescription("CONFIG_MERGE",
+    PrereqCheckType.CLUSTER,
+    "Configuration Merge Check",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "The following config types will have values overwritten: %s").build());
 
-  HARDCODED_STACK_VERSION_PROPERTIES_CHECK(PrereqCheckType.CLUSTER,
+  public static CheckDescription HARDCODED_STACK_VERSION_PROPERTIES_CHECK = new CheckDescription("HARDCODED_STACK_VERSION_PROPERTIES_CHECK",
+    PrereqCheckType.CLUSTER,
     "Found hardcoded hdp stack version in property value.",
     new ImmutableMap.Builder<String, String>()
       .put(AbstractCheckDescriptor.DEFAULT,
-        "Some properties seem to contain hardcoded hdp version string \"%s\"." +
-          " That is a potential problem when doing stack update.").build()),
-
-  VERSION_MISMATCH(PrereqCheckType.HOST,
-      "All components must be reporting the expected version",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-            "There are components which are not reporting the expected stack version: \n%s").build()),
-
-  SERVICES_RANGER_PASSWORD_VERIFY(PrereqCheckType.SERVICE,
-      "Verify Ambari and Ranger Password Synchronization",
-      new ImmutableMap.Builder<String, String>()
-        .put(AbstractCheckDescriptor.DEFAULT,
-            "There was a problem verifying Ranger and Ambari users")
-        .put(RangerPasswordCheck.KEY_RANGER_PASSWORD_MISMATCH,
-            "Credentials for user '%s' in Ambari do not match Ranger.")
-        .put(RangerPasswordCheck.KEY_RANGER_UNKNOWN_RESPONSE,
-            "Could not verify credentials for user '%s'.  Response code %s received from %s")
-        .put(RangerPasswordCheck.KEY_RANGER_COULD_NOT_ACCESS,
-            "Could not access Ranger to verify user '%s' against %s. %s")
-        .put(RangerPasswordCheck.KEY_RANGER_USERS_ELEMENT_MISSING,
-            "The response from Ranger received, but there is no users element.  Request: %s")
-        .put(RangerPasswordCheck.KEY_RANGER_OTHER_ISSUE,
-            "The response from Ranger was malformed. %s. Request: %s")
-        .put(RangerPasswordCheck.KEY_RANGER_CONFIG_MISSING,
-            "Could not check credentials.  Missing property %s/%s").build()),
-
-  ATLAS_SERVICE_PRESENCE_CHECK(PrereqCheckType.SERVICE,
+          "Some properties seem to contain hardcoded hdp version string \"%s\"." +
+          " That is a potential problem when doing stack update.").build());
+
+  public static CheckDescription VERSION_MISMATCH = new CheckDescription("VERSION_MISMATCH",
+    PrereqCheckType.HOST,
+    "All components must be reporting the expected version",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "There are components which are not reporting the expected stack version: \n%s").build());
+
+  public static CheckDescription SERVICES_RANGER_PASSWORD_VERIFY = new CheckDescription("SERVICES_RANGER_PASSWORD_VERIFY",
+    PrereqCheckType.SERVICE,
+    "Verify Ambari and Ranger Password Synchronization",
+    new ImmutableMap.Builder<String, String>()
+      .put(AbstractCheckDescriptor.DEFAULT,
+          "There was a problem verifying Ranger and Ambari users")
+      .put(RangerPasswordCheck.KEY_RANGER_PASSWORD_MISMATCH,
+          "Credentials for user '%s' in Ambari do not match Ranger.")
+      .put(RangerPasswordCheck.KEY_RANGER_UNKNOWN_RESPONSE,
+          "Could not verify credentials for user '%s'.  Response code %s received from %s")
+      .put(RangerPasswordCheck.KEY_RANGER_COULD_NOT_ACCESS,
+          "Could not access Ranger to verify user '%s' against %s. %s")
+      .put(RangerPasswordCheck.KEY_RANGER_USERS_ELEMENT_MISSING,
+          "The response from Ranger received, but there is no users element.  Request: %s")
+      .put(RangerPasswordCheck.KEY_RANGER_OTHER_ISSUE,
+          "The response from Ranger was malformed. %s. Request: %s")
+      .put(RangerPasswordCheck.KEY_RANGER_CONFIG_MISSING,
+          "Could not check credentials.  Missing property %s/%s").build());
+
+  public static CheckDescription ATLAS_SERVICE_PRESENCE_CHECK = new CheckDescription("ATLAS_SERVICE_PRESENCE_CHECK",
+    PrereqCheckType.SERVICE,
     "Atlas Is Not Supported For Upgrades",
     new ImmutableMap.Builder<String, String>()
       .put(AbstractCheckDescriptor.DEFAULT,
-        "The Atlas service is currently installed on the cluster. " +
-        "This service does not support upgrades and must be removed before the upgrade can continue. " +
-        "After upgrading, Atlas can be reinstalled").build()),
+          "The Atlas service is currently installed on the cluster. " +
+          "This service does not support upgrades and must be removed before the upgrade can continue. " +
+          "After upgrading, Atlas can be reinstalled").build());
 
-  RANGER_SERVICE_AUDIT_DB_CHECK(PrereqCheckType.SERVICE,
+  public static CheckDescription RANGER_SERVICE_AUDIT_DB_CHECK = new CheckDescription("RANGER_SERVICE_AUDIT_DB_CHECK",
+    PrereqCheckType.SERVICE,
     "Remove the Ranger Audit to Database Capability",
     new ImmutableMap.Builder<String, String>()
       .put(AbstractCheckDescriptor.DEFAULT,
-        "After upgrading, Ranger will no longer support the Audit to Database feature. Instead, Ranger will audit to Solr. " +
-                  "To migrate the existing audit logs to Solr, follow the steps in Apache Ranger documention for 0.6 release.").build()),
+          "After upgrading, Ranger will no longer support the Audit to Database feature. Instead, Ranger will audit to Solr. " +
+          "To migrate the existing audit logs to Solr, follow the steps in Apache Ranger documention for 0.6 release.").build());
 
-  KAFKA_KERBEROS_CHECK(PrereqCheckType.SERVICE,
+  public static CheckDescription KAFKA_KERBEROS_CHECK = new CheckDescription("KAFKA_KERBEROS_CHECK",
+    PrereqCheckType.SERVICE,
     "Kafka upgrade on Kerberized cluster",
     new ImmutableMap.Builder<String, String>()
       .put(AbstractCheckDescriptor.DEFAULT,
-        "Kafka is currently not Kerberized, but your cluster is. After upgrading, Kafka will automatically be Kerberized for you.").build()),
+          "Kafka is currently not Kerberized, but your cluster is. After upgrading, Kafka will automatically be Kerberized for you.").build());
 
-  SERVICES_HIVE_ROLLING_PORT_WARNING(PrereqCheckType.SERVICE,
-      "Hive Server Port Change",
-      new ImmutableMap.Builder<String, String>()
+  public static CheckDescription SERVICES_HIVE_ROLLING_PORT_WARNING = new CheckDescription("SERVICES_HIVE_ROLLING_PORT_WARNING",
+    PrereqCheckType.SERVICE,
+    "Hive Server Port Change",
+    new ImmutableMap.Builder<String, String>()
       .put(AbstractCheckDescriptor.DEFAULT,
-        "In order to support rolling upgrades, the Hive server is required to change its port. Applications and users which use a URL that includes the port will no longer be able to connect after Hive has upgraded. If this behavior is not desired, then the port can be restored to its original value after the upgrade has been finalized.").build()),
+          "In order to support rolling upgrades, the Hive server is required to change its port. Applications and users which use a URL that includes the port will no longer be able to connect after Hive has upgraded. If this behavior is not desired, then the port can be restored to its original value after the upgrade has been finalized.").build());
   
-  SERVICES_STORM_ROLLING_WARNING(PrereqCheckType.SERVICE,
-      "Storm Downtime During Upgrade",
-      new ImmutableMap.Builder<String, String>()
+  public static CheckDescription SERVICES_STORM_ROLLING_WARNING = new CheckDescription("SERVICES_STORM_ROLLING_WARNING",
+    PrereqCheckType.SERVICE,
+    "Storm Downtime During Upgrade",
+    new ImmutableMap.Builder<String, String>()
       .put(AbstractCheckDescriptor.DEFAULT,
-        "Storm does not support rolling upgrades on this version of the stack. If you proceed, you will be required to stop all running topologies before Storm is restarted.").build());  
+          "Storm does not support rolling upgrades on this version of the stack. If you proceed, you will be required to stop all running topologies before Storm is restarted.").build());
 
 
+  private String m_name;
   private PrereqCheckType m_type;
   private String m_description;
   private Map<String, String> m_fails;
-  private CheckDescription(PrereqCheckType type, String description,
+  public CheckDescription(String name, PrereqCheckType type, String description,
       Map<String, String> fails) {
+    m_name = name;
     m_type = type;
     m_description = description;
     m_fails = fails;
   }
 
   /**
+   * @return the name of the check
+   */
+  public String name() {
+    return m_name;
+  }
+
+  /**
    * @return the type of check
    */
   public PrereqCheckType getType() {


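The refactor above converts CheckDescription from a Java enum into an ordinary class with public static instances: each instance now carries its name explicitly and exposes it through name(), so existing call sites keep working while new checks can be declared outside the type. As a rough illustration of the pattern (a hypothetical Python analogue, not part of the commit):

    class CheckDescription(object):
        """Registry-style replacement for a closed enum: instances carry the
        same data an enum constant would, but can be created anywhere."""
        def __init__(self, name, check_type, description, fails):
            self._name = name
            self.check_type = check_type
            self.description = description
            self.fails = fails

        def name(self):
            # Mirrors Enum.name() so callers need not change.
            return self._name

    SERVICES_UP = CheckDescription(
        "SERVICES_UP", "SERVICE", "All services must be started",
        {"default": "The following Services must be started: {{fails}}."})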
[04/10] ambari git commit: AMBARI-18237. Certain configuration files cannot be modified through Ambari API. (aonishuk)

Posted by nc...@apache.org.
AMBARI-18237. Certain configuration files cannot be modified through Ambari API. (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4b141dd8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4b141dd8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4b141dd8

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 4b141dd8842fe97d9cf8565af179ea2b68191729
Parents: 65530d3
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 12 12:35:07 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 12 12:35:07 2016 +0300

----------------------------------------------------------------------
 .../stacks/HDP/2.0.6/hooks/before-START/scripts/params.py         | 3 ++-
 .../HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4b141dd8/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 49a14d0..45eab2f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -40,11 +40,12 @@ stack_version_formatted = format_stack_version(stack_version_unformatted)
 
 dfs_type = default("/commandParams/dfs_type", "")
 hadoop_conf_dir = "/etc/hadoop/conf"
-
 component_list = default("/localComponents", [])
 
 hdfs_tmp_dir = config['configurations']['hadoop-env']['hdfs_tmp_dir']
 
+hadoop_metrics2_properties_content = config['configurations']['hadoop-metrics2.properties']['content']
+
 # hadoop default params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b141dd8/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index ba9c8fb..ff52b31 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -101,7 +101,7 @@ def setup_hadoop():
       File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
            owner=params.hdfs_user,
            group=params.user_group,
-           content=Template("hadoop-metrics2.properties.j2")
+           content=InlineTemplate(params.hadoop_metrics2_properties_content)
       )
 
     if params.dfs_type == 'HCFS' and params.has_core_site and 'ECS_CLIENT' in params.component_list:

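The one-line fix above is the whole change: Template("hadoop-metrics2.properties.j2") rendered a file bundled inside the stack package, so edits made to the hadoop-metrics2.properties config type through the Ambari API never reached the managed file, while InlineTemplate renders the content string the server sends down with the command. A minimal sketch of the new wiring, assuming the ambari-agent resource_management library is importable (paths, owner, group, and the sample content are illustrative):

    from resource_management.core.environment import Environment
    from resource_management.core.resources.system import File
    from resource_management.core.source import InlineTemplate

    # In a real hook this string comes from
    # config['configurations']['hadoop-metrics2.properties']['content'].
    content_from_server = "*.sink.timeline.period=10"

    with Environment('/') as env:
        File("/etc/hadoop/conf/hadoop-metrics2.properties",
             owner="hdfs",
             group="hadoop",
             # InlineTemplate renders the server-provided string, so API-side
             # edits to the config type now reach the file on disk.
             content=InlineTemplate(content_from_server))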

[10/10] ambari git commit: AMBARI-17728: Error message does not deliver when executing ambari-server command as a non-root user (wang yaoxin via jluniya)

Posted by nc...@apache.org.
AMBARI-17728: Error message does not deliver when executing ambari-server command as a non-root user (wang yaoxin via jluniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e4cb41e0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e4cb41e0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e4cb41e0

Branch: refs/heads/branch-dev-patch-upgrade
Commit: e4cb41e0ab469788180f3ac5741d331706b46ea0
Parents: 623c36d
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Mon Sep 12 09:50:49 2016 -0700
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Mon Sep 12 09:50:49 2016 -0700

----------------------------------------------------------------------
 ambari-server/sbin/ambari-server | 7 +++++++
 1 file changed, 7 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e4cb41e0/ambari-server/sbin/ambari-server
----------------------------------------------------------------------
diff --git a/ambari-server/sbin/ambari-server b/ambari-server/sbin/ambari-server
index e055650..f52a598 100755
--- a/ambari-server/sbin/ambari-server
+++ b/ambari-server/sbin/ambari-server
@@ -55,6 +55,13 @@ AMBARI_ENV="$ROOT/var/lib/ambari-server/ambari-env.sh"
 AMBARI_PYTHON_EXECUTABLE="$ROOT/usr/sbin/ambari-server.py"
 AMBARI_EXECUTABLE="$ROOT/usr/sbin/ambari-server"
 
+current_user=`id -u -n`
+echo "" | sudo -S -l > /dev/null 2>&1
+if [ "$?" != "0" ] && [ "$EUID" -ne 0 ] ; then
+ echo "You can't perform this operation as non-sudoer user. Please re-login or configure sudo access for this user."
+ exit -1
+fi
+
 if [ -z "$PYTHON" ] ; then
   export PYTHON=`readlink $PYTHON_WRAP`
 fi

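The guard added above probes sudo non-interactively ('echo "" | sudo -S -l') and aborts early for users who are neither root nor sudoers, instead of failing later with a confusing error from some privileged sub-command. A rough Python equivalent of the same probe (illustrative only, not part of the commit):

    import os
    import subprocess

    def can_manage_server():
        # Root may always proceed.
        if os.geteuid() == 0:
            return True
        # 'sudo -S -l' with empty stdin exits 0 only when the user can run
        # sudo without an interactive password prompt blocking the script.
        probe = subprocess.run(["sudo", "-S", "-l"], input=b"\n",
                               stdout=subprocess.DEVNULL,
                               stderr=subprocess.DEVNULL)
        return probe.returncode == 0

    if not can_manage_server():
        raise SystemExit("Please re-login or configure sudo access for this user.")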

[03/10] ambari git commit: AMBARI-18353. Ambari Dashboard UI does not load intermittently. (jaimin)

Posted by nc...@apache.org.
AMBARI-18353. Ambari Dashboard UI does not load intermittently. (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/65530d31
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/65530d31
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/65530d31

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 65530d315c79f1bd8f91c583b0c36ce4c18cc378
Parents: 2547d8f
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Sat Sep 10 10:26:01 2016 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Sat Sep 10 10:29:19 2016 -0700

----------------------------------------------------------------------
 ambari-web/app/mappers/service_mapper.js | 29 ++++++++++++++++++++++-----
 ambari-web/app/models/service.js         |  2 +-
 2 files changed, 25 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/65530d31/ambari-web/app/mappers/service_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/service_mapper.js b/ambari-web/app/mappers/service_mapper.js
index 9c21789..9a330f9 100644
--- a/ambari-web/app/mappers/service_mapper.js
+++ b/ambari-web/app/mappers/service_mapper.js
@@ -61,12 +61,31 @@ App.serviceMapper = App.QuickDataMapper.create({
       App.store.commit();
       this.set('initialAppLoad', true);
     }
-
-    for (var service in passiveStateMap) {
-      if (passiveStateMap.hasOwnProperty(service)) {
-        App.Service.find(service).set('passiveState', passiveStateMap[service]);
+    this.servicesLoading().done(function setMaintenanceState() {
+      for (var service in passiveStateMap) {
+        if (passiveStateMap.hasOwnProperty(service)) {
+          App.Service.find(service).set('passiveState', passiveStateMap[service]);
+        }
       }
-    }
+    });
+
     console.timeEnd("App.serviceMapper execution time");
+  },
+
+  servicesLoading: function () {
+    var dfd = $.Deferred();
+    var isAllServicesLoaded = App.store.findAll(App.Service).everyProperty('isLoaded', true);
+    if (isAllServicesLoaded) {
+      dfd.resolve();
+    } else {
+      var interval = setInterval(function checkIfServicesLoaded() {
+        var isAllServicesLoaded = App.store.findAll(App.Service).everyProperty('isLoaded', true);
+        if (isAllServicesLoaded) {
+          dfd.resolve();
+          clearInterval(interval);
+        }
+      }, 5);
+    }
+    return dfd.promise();
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/65530d31/ambari-web/app/models/service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/service.js b/ambari-web/app/models/service.js
index c648e93..e9dc327 100644
--- a/ambari-web/app/models/service.js
+++ b/ambari-web/app/models/service.js
@@ -23,7 +23,7 @@ require('utils/config');
 App.Service = DS.Model.extend({
   serviceName: DS.attr('string'),
   displayName: Em.computed.formatRole('serviceName', true),
-  passiveState: DS.attr('string'),
+  passiveState: DS.attr('string', {defaultValue: "OFF"}),
   workStatus: DS.attr('string'),
   rand: DS.attr('string'),
   toolTipContent: DS.attr('string'),

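The mapper previously set passiveState as soon as the payload was parsed, racing the asynchronous store load; the fix defers that work behind a promise that polls every 5 ms until every App.Service record reports isLoaded. The same wait-until-ready shape, sketched in Python for reference (the records dict and names are illustrative):

    import threading

    def when_ready(predicate, callback, interval=0.005):
        """Run callback once predicate() is true, re-checking on a timer,
        like the deferred-plus-setInterval combination in servicesLoading()."""
        if predicate():
            callback()
        else:
            threading.Timer(interval, when_ready,
                            (predicate, callback, interval)).start()

    records = {"HDFS": True, "YARN": True}  # per-service isLoaded flags
    when_ready(lambda: all(records.values()),
               lambda: print("all services loaded; applying passive state"))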

[09/10] ambari git commit: AMBARI-18324: Externalize skip repo url check to ambari.properties instead of hardcoding it in Ambari Java code (dili)

Posted by nc...@apache.org.
AMBARI-18324: Externalize skip repo url check to ambari.properties instead of hardcoding it in Ambari Java code (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/623c36d2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/623c36d2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/623c36d2

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 623c36d2e0832d9a0bc60bc9ff19b302d5e3cefa
Parents: 2961c48
Author: Di Li <di...@apache.org>
Authored: Mon Sep 12 10:05:31 2016 -0400
Committer: Di Li <di...@apache.org>
Committed: Mon Sep 12 10:05:31 2016 -0400

----------------------------------------------------------------------
 .../server/configuration/Configuration.java     | 25 +++++++++++++++++++-
 .../RepositoryVersionResourceProvider.java      | 15 +++++++++++-
 .../server/configuration/ConfigurationTest.java | 20 ++++++++++++++++
 3 files changed, 58 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/623c36d2/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 0690ca8..ee73b8d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -2300,6 +2300,14 @@ public class Configuration {
   public static final ConfigurationProperty<Integer> TASK_ID_LIST_LIMIT = new ConfigurationProperty<>(
       "task.query.parameterlist.size", 999);
 
+  /**
+   * A comma-separated list of repo ids to skip the repo url check when registering a repo for the stack.
+   * Defaults to HDP-UTILS.
+   */
+  @Markdown(description = "The list of repo ids to skip the repo url check when registering a repo for the stack.")
+  public static final ConfigurationProperty<String> SKIP_REPO_URL_EXISTENCE_VALIDATION_LIST = new ConfigurationProperty<>(
+      "no.repo.existence.validation.list", "HDP-UTILS");
+
   private static final Logger LOG = LoggerFactory.getLogger(
     Configuration.class);
 
@@ -4825,6 +4833,22 @@ public class Configuration {
   }
 
   /**
+   * Defaults to HDP-UTILS.
+   */
+  public List<String> getSkipRepoUrlExistenceValidationList(){
+    List<String> list = new ArrayList<String>();
+    String propValue = getProperty(SKIP_REPO_URL_EXISTENCE_VALIDATION_LIST);
+    for (String repo: propValue.split(",")) {
+      repo = repo.trim();
+      if (!repo.isEmpty()) {
+        list.add(repo);
+      }
+    }
+    LOG.debug("Skip Repo URL Existence Validation on :" + list);
+    return list;
+  }
+
+  /**
    * Generates a markdown table which includes:
    * <ul>
    * <li>Property key name</li>
@@ -5187,5 +5211,4 @@ public class Configuration {
     ClusterSizeType clusterSize();
     String value();
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/623c36d2/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
index e440460..cd440f3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
@@ -32,6 +32,7 @@ import org.apache.ambari.server.ObjectNotFoundException;
 import org.apache.ambari.server.api.resources.OperatingSystemResourceDefinition;
 import org.apache.ambari.server.api.resources.RepositoryResourceDefinition;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
@@ -457,11 +458,14 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
 
     // List of all repo urls that are already added at stack
     Set<String> existingRepoUrls = new HashSet<String>();
+    Configuration configuration = new Configuration();
+    List<String> skipRepos = configuration.getSkipRepoUrlExistenceValidationList();
     List<RepositoryVersionEntity> existingRepoVersions = dao.findByStack(requiredStack);
     for (RepositoryVersionEntity existingRepoVersion : existingRepoVersions) {
       for (OperatingSystemEntity operatingSystemEntity : existingRepoVersion.getOperatingSystems()) {
         for (RepositoryEntity repositoryEntity : operatingSystemEntity.getRepositories()) {
-          if (! repositoryEntity.getRepositoryId().startsWith("HDP-UTILS") &&  // HDP-UTILS is shared between repo versions
+          boolean toSkipRepo = isToSkip(repositoryEntity.getRepositoryId(), skipRepos);
+          if (! toSkipRepo && // HDP-UTILS is shared between repo versions
                   ! existingRepoVersion.getId().equals(repositoryVersion.getId())) { // Allow modifying already defined repo version
             existingRepoUrls.add(repositoryEntity.getBaseUrl());
           }
@@ -548,4 +552,13 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
     // This information is not associated with any particular resource
     return null;
   }
+
+  private static boolean isToSkip(String repoId, List<String> skipRepos){
+    for(String repo: skipRepos){
+      if (repoId.startsWith(repo)){
+        return true;
+      }
+    }
+    return false;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/623c36d2/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
index f9b76f8..f429a36 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
@@ -30,6 +30,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
+import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
@@ -920,4 +921,23 @@ public class ConfigurationTest {
           StringUtils.isEmpty(markdown.description()));
     }
   }
+
+  /**
+   * Tests the default values for the skip repo url existence validation list.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testGetSkipRepoUrlExistenceValidationListDefaults() throws Exception {
+
+    final Properties ambariProperties = new Properties();
+    final Configuration configuration = new Configuration(ambariProperties);
+
+    List<String> skipRepos = configuration.getSkipRepoUrlExistenceValidationList();
+
+    // test defaults
+    Assert.assertEquals(1, skipRepos.size());
+    String hdpUtils = skipRepos.get(0);
+    Assert.assertTrue("HDP-UTILS".equals(hdpUtils));
+  }
 }

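In short: the property value is split on commas, entries are trimmed and empties dropped, and a repository is exempt from the duplicate-URL check when its id starts with any configured prefix, which preserves the old hardcoded HDP-UTILS behavior as the default. A compact Python restatement of that logic (hypothetical helper names):

    def parse_skip_list(prop_value):
        # "HDP-UTILS, FOO ,," -> ["HDP-UTILS", "FOO"]
        return [repo.strip() for repo in prop_value.split(",") if repo.strip()]

    def is_skipped(repo_id, skip_prefixes):
        # Prefix match, mirroring repositoryId.startsWith(repo) in isToSkip().
        return any(repo_id.startswith(prefix) for prefix in skip_prefixes)

    assert parse_skip_list(" HDP-UTILS ,") == ["HDP-UTILS"]
    assert is_skipped("HDP-UTILS-1.1.0.21", ["HDP-UTILS"])
    assert not is_skipped("HDP-2.5", ["HDP-UTILS"])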

[06/10] ambari git commit: AMBARI-18357. Ambari fails to start nodemanager due to unexpected return code from sudo su command on the pid file (aonishuk)

Posted by nc...@apache.org.
AMBARI-18357. Ambari fails to start nodemanager due to unexpected return code from sudo su command on the pid file (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1a279102
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1a279102
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1a279102

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1a279102ebf5ec99a772ff3ca2c889df31ce8c60
Parents: e9b4f52
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 12 12:58:43 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 12 12:58:43 2016 +0300

----------------------------------------------------------------------
 .../YARN/2.1.0.2.0/package/scripts/service.py   |  4 ++--
 .../stacks/2.0.6/YARN/test_historyserver.py     | 18 ++++++++----------
 .../stacks/2.0.6/YARN/test_nodemanager.py       | 20 ++++++++------------
 .../stacks/2.0.6/YARN/test_resourcemanager.py   | 19 +++++++++----------
 .../stacks/2.1/YARN/test_apptimelineserver.py   | 20 +++++++++-----------
 5 files changed, 36 insertions(+), 45 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1a279102/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
index 3a4115c..78b2428 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
@@ -21,7 +21,7 @@ Ambari Agent
 
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
-from resource_management.core.shell import as_user
+from resource_management.core.shell import as_user, as_sudo
 from resource_management.libraries.functions.show_logs import show_logs
 from resource_management.libraries.functions.format import format
 from resource_management.core.resources.system import Execute, File
@@ -60,7 +60,7 @@ def service(componentName, action='start', serviceName='yarn'):
 
   if action == 'start':
     daemon_cmd = format("{ulimit_cmd} {cmd} start {componentName}")
-    check_process = as_user(format("ls {pid_file} && ps -p `cat {pid_file}`"), user=usr)
+    check_process = as_sudo(["test", "-f", pid_file]) + " && " + as_sudo(["pgrep", "-F", pid_file])
 
     # Remove the pid file if its corresponding process is not running.
     File(pid_file, action = "delete", not_if = check_process)

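The old probe wrapped 'ls pid && ps -p `cat pid`' in 'su <user> -l', so the reported status mixed the login shell's behavior with the probe's; the replacement runs 'test -f' and 'pgrep -F' directly through the sudo wrapper, whose exit code is exactly that of the probe. Assuming the ambari-agent resource_management library is importable, the construction is simply (pid_file value illustrative):

    from resource_management.core.shell import as_sudo

    pid_file = "/var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid"
    # test -f succeeds only if the pid file exists; pgrep -F succeeds only if
    # the pid recorded in it belongs to a live process. Chaining with &&
    # yields a clean status with no intervening login shell.
    check_process = as_sudo(["test", "-f", pid_file]) + " && " + as_sudo(["pgrep", "-F", pid_file])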
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a279102/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index ef79f10..9ce5530 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -114,14 +114,14 @@ class TestHistoryServer(RMFTestCase):
 
     self.assertResourceCalled('File', '/var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su mapred -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ps -p `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
     )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh --config /etc/hadoop/conf start historyserver',
-        not_if = "ambari-sudo.sh su mapred -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ps -p `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
         user = 'mapred',
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su mapred -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ps -p `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid`'",
-        not_if = "ambari-sudo.sh su mapred -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ps -p `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid`'",
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
         tries = 5,
         try_sleep = 1,
     )
@@ -165,18 +165,16 @@ class TestHistoryServer(RMFTestCase):
 
     self.assert_configure_secured()
 
-    pid_check_cmd = 'ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid` >/dev/null 2>&1'
-
     self.assertResourceCalled('File', '/var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su mapred -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ps -p `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
     )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh --config /etc/hadoop/conf start historyserver',
-        not_if = "ambari-sudo.sh su mapred -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ps -p `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
         user = 'mapred',
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su mapred -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ps -p `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid`'",
-        not_if = "ambari-sudo.sh su mapred -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ps -p `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid`'",
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
         tries = 5,
         try_sleep = 1,
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/1a279102/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index 06b976e..4abf2c9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -55,18 +55,16 @@ class TestNodeManager(RMFTestCase):
     )
     self.assert_configure_default()
 
-    pid_check_cmd = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1'
-
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
     )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start nodemanager',
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
         user = 'yarn',
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
         tries = 5,
         try_sleep = 1,
     )
@@ -107,18 +105,16 @@ class TestNodeManager(RMFTestCase):
 
     self.assert_configure_secured()
 
-    pid_check_cmd = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1'
-
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
     )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start nodemanager',
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
         user = 'yarn',
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
         tries = 5,
         try_sleep = 1,
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/1a279102/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index a0e65cd..7b5ce18 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -58,17 +58,17 @@ class TestResourceManager(RMFTestCase):
     )
 
     self.assert_configure_default()
-
+    
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
     )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start resourcemanager',
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
         user = 'yarn',
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid`'",
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid`'",
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
         tries = 5,
         try_sleep = 1,
     )
@@ -109,17 +109,16 @@ class TestResourceManager(RMFTestCase):
 
     self.assert_configure_secured()
 
-    pid_check_cmd = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1'
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
     )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start resourcemanager',
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
         user = 'yarn',
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid`'",
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid`'",
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
         tries = 5,
         try_sleep = 1,
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/1a279102/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
index f56907c..dc11ba9 100644
--- a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
+++ b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
@@ -55,25 +55,23 @@ class TestAppTimelineServer(RMFTestCase):
     )
 
     self.assert_configure_default()
-
+    
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid',
     )
-
     self.assertResourceCalled('File', '/var/log/hadoop-yarn/timeline/leveldb-timeline-store.ldb/LOCK',
-        only_if='ls /var/log/hadoop-yarn/timeline/leveldb-timeline-store.ldb/LOCK',
-        action=['delete'],
-        not_if="ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid`'",
-        ignore_failures=True
+        action = ['delete'],
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid',
+        ignore_failures = True,
+        only_if = 'ls /var/log/hadoop-yarn/timeline/leveldb-timeline-store.ldb/LOCK',
     )
-
     self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start timelineserver',
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid`'",
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid',
         user = 'yarn',
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid`'",
-        not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid`'",
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid',
+        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop-yarn/yarn/yarn-yarn-timelineserver.pid',
         tries = 5,
         try_sleep = 1,
     )


[05/10] ambari git commit: AMBARI-18356. ZEPPELIN user won't get created in ZEPPELIN group (aonishuk)

Posted by nc...@apache.org.
AMBARI-18356. ZEPPELIN user won't get created in ZEPPELIN group (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e9b4f529
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e9b4f529
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e9b4f529

Branch: refs/heads/branch-dev-patch-upgrade
Commit: e9b4f5298123d98a062f458604a2ae322405a282
Parents: 4b141dd
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 12 12:49:21 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 12 12:49:21 2016 +0300

----------------------------------------------------------------------
 .../stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py        | 6 ++++++
 1 file changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e9b4f529/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 5544085..8b52ca1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -169,6 +169,8 @@ tez_user = config['configurations']['tez-env']["tez_user"]
 oozie_user = config['configurations']['oozie-env']["oozie_user"]
 falcon_user = config['configurations']['falcon-env']["falcon_user"]
 ranger_user = config['configurations']['ranger-env']["ranger_user"]
+zeppelin_user = config['configurations']['zeppelin-env']["zeppelin_user"]
+zeppelin_group = config['configurations']['zeppelin-env']["zeppelin_group"]
 
 user_group = config['configurations']['cluster-env']['user_group']
 
@@ -178,6 +180,7 @@ hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
 oozie_servers = default("/clusterHostInfo/oozie_server", [])
 falcon_server_hosts = default("/clusterHostInfo/falcon_server_hosts", [])
 ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
+zeppelin_master_hosts = default("/clusterHostInfo/zeppelin_master_hosts", [])
 
 has_namenode = not len(namenode_host) == 0
 has_ganglia_server = not len(ganglia_server_hosts) == 0
@@ -186,6 +189,7 @@ has_hbase_masters = not len(hbase_master_hosts) == 0
 has_oozie_server = not len(oozie_servers) == 0
 has_falcon_server_hosts = not len(falcon_server_hosts) == 0
 has_ranger_admin = not len(ranger_admin_hosts) == 0
+has_zeppelin_master = not len(zeppelin_master_hosts) == 0
 
 if has_namenode or dfs_type == 'HCFS':
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
@@ -219,6 +223,8 @@ if has_falcon_server_hosts:
   user_to_groups_dict[falcon_user] = [proxyuser_group]
 if has_ranger_admin:
   user_to_groups_dict[ranger_user] = [ranger_group]
+if has_zeppelin_master:
+  user_to_groups_dict[zeppelin_user] = [zeppelin_group, user_group]
 
 user_to_gid_dict = collections.defaultdict(lambda:user_group)
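With these additions, a host carrying a Zeppelin master gets the zeppelin user created in both its dedicated group and the shared user_group, while any user not listed in user_to_groups_dict still falls back to user_group via the defaultdict. A small self-contained sketch of that resolution (values are illustrative):

    import collections

    user_group = "hadoop"
    user_to_groups_dict = {"zeppelin": ["zeppelin", user_group]}

    # Users absent from the dict default to the cluster-wide user_group.
    user_to_gid_dict = collections.defaultdict(lambda: user_group)

    for user in ("zeppelin", "hdfs"):
        groups = user_to_groups_dict.get(user, [user_group])
        print(user, "groups:", ",".join(groups), "gid:", user_to_gid_dict[user])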