Posted to commits@ambari.apache.org by jo...@apache.org on 2014/09/13 01:53:29 UTC

[01/30] git commit: AMBARI-7258 Slider View: FE - Make ganglia monitoring optional when creating app, second patch. (atkach)

Repository: ambari
Updated Branches:
  refs/heads/branch-alerts-dev 853497f8e -> aaf051357


AMBARI-7258 Slider View: FE - Make ganglia monitoring optional when creating app, second patch. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8778556f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8778556f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8778556f

Branch: refs/heads/branch-alerts-dev
Commit: 8778556f438f35c650912ea32e375c5ba0619986
Parents: 3035978
Author: atkach <at...@hortonworks.com>
Authored: Thu Sep 11 22:08:20 2014 +0300
Committer: atkach <at...@hortonworks.com>
Committed: Thu Sep 11 22:08:20 2014 +0300

----------------------------------------------------------------------
 .../ui/app/controllers/createAppWizard/step3_controller.js       | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8778556f/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js b/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js
index 0377388..a2eaa2e 100644
--- a/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js
+++ b/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js
@@ -127,8 +127,8 @@ App.CreateAppWizardStep3Controller = Ember.ObjectController.extend({
     Object.keys(newAppConfigs).forEach(function (key) {
       var label = (!!key.match('^site.')) ? key.substr(5) : key;
       var configSetting = (configSettings[key]) ?
-        $.extend({name: key, value: configs[key], label: label}, configSettings[key]) :
-        {name: key, value: configs[key], label: label};
+        $.extend({name: key, value: newAppConfigs[key], label: label}, configSettings[key]) :
+        {name: key, value: newAppConfigs[key], label: label};
 
       if (key === "site.global.ganglia_server_host" && !!setDefaults && App.get('gangliaHost')) {
         configSetting.value = App.get('gangliaHost');
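
The defect was a stale lookup: the loop iterates Object.keys(newAppConfigs), but the old code read each value from a different `configs` map, so any key present only in newAppConfigs surfaced with an undefined value. A minimal standalone sketch of the corrected behavior, assuming jQuery's $.extend semantics (the sample data is hypothetical):

// Keys and values now come from the same map, so they cannot drift apart.
var newAppConfigs = { 'site.global.ganglia_server_host': 'c6401.ambari.apache.org' };
var configSettings = { 'site.global.ganglia_server_host': { viewType: 'host' } };

Object.keys(newAppConfigs).forEach(function (key) {
  var label = key.match('^site.') ? key.substr(5) : key; // drop the "site." prefix
  var configSetting = configSettings[key] ?
    // $.extend layers the predefined setting over the generated defaults
    $.extend({name: key, value: newAppConfigs[key], label: label}, configSettings[key]) :
    {name: key, value: newAppConfigs[key], label: label};
  console.log(configSetting);
});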


[17/30] git commit: AMBARI-7260. Total space Utilization metric is not displaying on stack 1.3 (aonishuk)

Posted by jo...@apache.org.
AMBARI-7260. Total space Utilization metric is not displaying on stack 1.3 (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/941b56a3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/941b56a3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/941b56a3

Branch: refs/heads/branch-alerts-dev
Commit: 941b56a39aff7c53514d235e86143a8269b1b0a5
Parents: 6d6f4dd
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Fri Sep 12 17:00:53 2014 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Fri Sep 12 17:00:53 2014 +0300

----------------------------------------------------------------------
 .../src/main/resources/ganglia_properties.json  | 40 ++++++++++++++++++++
 1 file changed, 40 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/941b56a3/ambari-server/src/main/resources/ganglia_properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/ganglia_properties.json b/ambari-server/src/main/resources/ganglia_properties.json
index 3fe9460..e64309a 100644
--- a/ambari-server/src/main/resources/ganglia_properties.json
+++ b/ambari-server/src/main/resources/ganglia_properties.json
@@ -1649,6 +1649,26 @@
         "pointInTime":false,
         "temporal":true
       },
+      "metrics/dfs/FSNamesystem/CapacityRemaining":{
+        "metric":"dfs.FSNamesystem.CapacityRemaining",
+        "pointInTime":false,
+        "temporal":true
+      },
+      "metrics/dfs/FSNamesystem/CapacityTotal":{
+        "metric":"dfs.FSNamesystem.CapacityTotal",
+        "pointInTime":false,
+        "temporal":true
+      },
+      "metrics/dfs/FSNamesystem/CapacityUsed":{
+        "metric":"dfs.FSNamesystem.CapacityUsed",
+        "pointInTime":false,
+        "temporal":true
+      },
+      "metrics/dfs/FSNamesystem/CapacityNonDFSUsed": {
+        "metric": "dfs.FSNamesystem.CapacityUsedNonDFS",
+        "pointInTime": false,
+        "temporal": true
+      },
       "metrics/dfs/FSNamesystem/CorruptBlocks":{
         "metric":"dfs.FSNamesystem.CorruptBlocks",
         "pointInTime":false,
@@ -11115,6 +11135,26 @@
         "pointInTime":false,
         "temporal":true
       },
+      "metrics/dfs/FSNamesystem/CapacityRemaining":{
+        "metric":"dfs.FSNamesystem.CapacityRemaining",
+        "pointInTime":false,
+        "temporal":true
+      },
+      "metrics/dfs/FSNamesystem/CapacityTotal":{
+        "metric":"dfs.FSNamesystem.CapacityTotal",
+        "pointInTime":false,
+        "temporal":true
+      },
+      "metrics/dfs/FSNamesystem/CapacityUsed":{
+        "metric":"dfs.FSNamesystem.CapacityUsed",
+        "pointInTime":false,
+        "temporal":true
+      },
+      "metrics/dfs/FSNamesystem/CapacityNonDFSUsed": {
+        "metric": "dfs.FSNamesystem.CapacityUsedNonDFS",
+        "pointInTime": false,
+        "temporal": true
+      },
       "metrics/dfs/FSNamesystem/CorruptBlocks":{
         "metric":"dfs.FSNamesystem.CorruptBlocks",
         "pointInTime":false,


[06/30] git commit: AMBARI-7242. Make Nagios client dependencies conditional on client service being deployed.

Posted by jo...@apache.org.
AMBARI-7242. Make Nagios client dependencies conditional on client service being deployed.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9d201f54
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9d201f54
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9d201f54

Branch: refs/heads/branch-alerts-dev
Commit: 9d201f548b3cc77745e6a7616cc2e7be8920dbe1
Parents: d961ca0
Author: Robert Nettleton <rn...@hortonworks.com>
Authored: Thu Sep 11 16:10:27 2014 -0400
Committer: John Speidel <js...@hortonworks.com>
Committed: Thu Sep 11 16:12:09 2014 -0400

----------------------------------------------------------------------
 .../internal/BaseBlueprintProcessor.java        |  13 +-
 .../internal/BaseBlueprintProcessorTest.java    | 413 +++++++++++++++++++
 2 files changed, 425 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9d201f54/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
index 9c10ac1..5a99af8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
@@ -595,6 +595,11 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
       return version;
     }
 
+
+    Map<DependencyInfo, String> getDependencyConditionalServiceMap() {
+      return dependencyConditionalServiceMap;
+    }
+
     /**
      * Get services contained in the stack.
      *
@@ -842,13 +847,19 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
      * Register conditional dependencies.
      */
     //todo: This information should be specified in the stack definition.
-    private void registerConditionalDependencies() {
+    void registerConditionalDependencies() {
       Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("NAGIOS_SERVER");
       for (DependencyInfo dependency : nagiosDependencies) {
         if (dependency.getComponentName().equals("HCAT")) {
           dependencyConditionalServiceMap.put(dependency, "HCATALOG");
         } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {
           dependencyConditionalServiceMap.put(dependency, "OOZIE");
+        } else if (dependency.getComponentName().equals("YARN_CLIENT")) {
+          dependencyConditionalServiceMap.put(dependency, "YARN");
+        } else if (dependency.getComponentName().equals("TEZ_CLIENT")) {
+          dependencyConditionalServiceMap.put(dependency, "TEZ");
+        } else if (dependency.getComponentName().equals("MAPREDUCE2_CLIENT")) {
+          dependencyConditionalServiceMap.put(dependency, "MAPREDUCE2");
         }
       }
       dbDependencyInfo.put("MYSQL_SERVER", "global/hive_database");

http://git-wip-us.apache.org/repos/asf/ambari/blob/9d201f54/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
new file mode 100644
index 0000000..ffe2ea0
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
@@ -0,0 +1,413 @@
+package org.apache.ambari.server.controller.internal;
+
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.StackServiceResponse;
+import org.apache.ambari.server.state.DependencyInfo;
+import org.easymock.EasyMockSupport;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.isA;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.junit.Assert.*;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class BaseBlueprintProcessorTest {
+
+  @Before
+  public void setUp() throws Exception {
+    BaseBlueprintProcessor.stackInfo = null;
+  }
+
+
+  @Test
+  public void testStackRegisterConditionalDependencies() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+
+    // setup mock expectations
+    expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
+
+    // test dependencies
+    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
+    final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
+    final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
+    final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
+
+    mockSupport.replayAll();
+
+    // create stack for testing
+    BaseBlueprintProcessor.Stack testStack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockMgmtController) {
+        @Override
+        public Collection<DependencyInfo> getDependenciesForComponent(String component) {
+          // simulate the dependencies in a given stack by overriding this method
+          if (component.equals("NAGIOS_SERVER")) {
+            Set<DependencyInfo> setOfDependencies = new HashSet<DependencyInfo>();
+
+            setOfDependencies.add(hCatDependency);
+            setOfDependencies.add(yarnClientDependency);
+            setOfDependencies.add(tezClientDependency);
+            setOfDependencies.add(mapReduceTwoClientDependency);
+            setOfDependencies.add(oozieClientDependency);
+
+            return setOfDependencies;
+          }
+
+            return Collections.emptySet();
+        }
+      };
+
+    assertEquals("Initial conditional dependency map should be empty",
+                 0, testStack.getDependencyConditionalServiceMap().size());
+
+    testStack.registerConditionalDependencies();
+
+    assertEquals("Set of conditional service mappings is an incorrect size",
+                 5, testStack.getDependencyConditionalServiceMap().size());
+
+    assertEquals("Incorrect service dependency for HCAT",
+                 "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+    assertEquals("Incorrect service dependency for YARN_CLIENT",
+                 "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
+    assertEquals("Incorrect service dependency for TEZ_CLIENT",
+                 "TEZ", testStack.getDependencyConditionalServiceMap().get(tezClientDependency));
+    assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
+                 "MAPREDUCE2", testStack.getDependencyConditionalServiceMap().get(mapReduceTwoClientDependency));
+    assertEquals("Incorrect service dependency for OOZIE_CLIENT",
+                 "OOZIE", testStack.getDependencyConditionalServiceMap().get(oozieClientDependency));
+
+    mockSupport.verifyAll();
+  }
+
+
+  @Test
+  public void testStackRegisterConditionalDependenciesNoHCAT() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+
+    // setup mock expectations
+    expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
+
+    // test dependencies
+    final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
+    final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
+    final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
+    final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
+
+    mockSupport.replayAll();
+
+    // create stack for testing
+    BaseBlueprintProcessor.Stack testStack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockMgmtController) {
+        @Override
+        public Collection<DependencyInfo> getDependenciesForComponent(String component) {
+          // simulate the dependencies in a given stack by overriding this method
+          if (component.equals("NAGIOS_SERVER")) {
+            Set<DependencyInfo> setOfDependencies = new HashSet<DependencyInfo>();
+
+            setOfDependencies.add(yarnClientDependency);
+            setOfDependencies.add(tezClientDependency);
+            setOfDependencies.add(mapReduceTwoClientDependency);
+            setOfDependencies.add(oozieClientDependency);
+
+            return setOfDependencies;
+          }
+
+          return Collections.emptySet();
+        }
+      };
+
+    assertEquals("Initial conditional dependency map should be empty",
+      0, testStack.getDependencyConditionalServiceMap().size());
+
+    testStack.registerConditionalDependencies();
+
+    assertEquals("Set of conditional service mappings is an incorrect size",
+      4, testStack.getDependencyConditionalServiceMap().size());
+
+    assertEquals("Incorrect service dependency for YARN_CLIENT",
+      "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
+    assertEquals("Incorrect service dependency for TEZ_CLIENT",
+      "TEZ", testStack.getDependencyConditionalServiceMap().get(tezClientDependency));
+    assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
+      "MAPREDUCE2", testStack.getDependencyConditionalServiceMap().get(mapReduceTwoClientDependency));
+    assertEquals("Incorrect service dependency for OOZIE_CLIENT",
+      "OOZIE", testStack.getDependencyConditionalServiceMap().get(oozieClientDependency));
+
+    mockSupport.verifyAll();
+  }
+
+
+  @Test
+  public void testStackRegisterConditionalDependenciesNoYarnClient() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+
+    // setup mock expectations
+    expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
+
+    // test dependencies
+    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
+    final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
+    final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
+
+    mockSupport.replayAll();
+
+    // create stack for testing
+    BaseBlueprintProcessor.Stack testStack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockMgmtController) {
+        @Override
+        public Collection<DependencyInfo> getDependenciesForComponent(String component) {
+          // simulate the dependencies in a given stack by overriding this method
+          if (component.equals("NAGIOS_SERVER")) {
+            Set<DependencyInfo> setOfDependencies = new HashSet<DependencyInfo>();
+
+            setOfDependencies.add(hCatDependency);
+            setOfDependencies.add(tezClientDependency);
+            setOfDependencies.add(mapReduceTwoClientDependency);
+            setOfDependencies.add(oozieClientDependency);
+
+            return setOfDependencies;
+          }
+
+          return Collections.emptySet();
+        }
+      };
+
+    assertEquals("Initial conditional dependency map should be empty",
+      0, testStack.getDependencyConditionalServiceMap().size());
+
+    testStack.registerConditionalDependencies();
+
+    assertEquals("Set of conditional service mappings is an incorrect size",
+      4, testStack.getDependencyConditionalServiceMap().size());
+
+    assertEquals("Incorrect service dependency for HCAT",
+      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+    assertEquals("Incorrect service dependency for TEZ_CLIENT",
+      "TEZ", testStack.getDependencyConditionalServiceMap().get(tezClientDependency));
+    assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
+      "MAPREDUCE2", testStack.getDependencyConditionalServiceMap().get(mapReduceTwoClientDependency));
+    assertEquals("Incorrect service dependency for OOZIE_CLIENT",
+      "OOZIE", testStack.getDependencyConditionalServiceMap().get(oozieClientDependency));
+
+    mockSupport.verifyAll();
+  }
+
+
+  @Test
+  public void testStackRegisterConditionalDependenciesNoTezClient() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+
+    // setup mock expectations
+    expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
+
+    // test dependencies
+    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
+    final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
+    final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
+
+    mockSupport.replayAll();
+
+    // create stack for testing
+    BaseBlueprintProcessor.Stack testStack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockMgmtController) {
+        @Override
+        public Collection<DependencyInfo> getDependenciesForComponent(String component) {
+          // simulate the dependencies in a given stack by overriding this method
+          if (component.equals("NAGIOS_SERVER")) {
+            Set<DependencyInfo> setOfDependencies = new HashSet<DependencyInfo>();
+
+            setOfDependencies.add(hCatDependency);
+            setOfDependencies.add(yarnClientDependency);
+            setOfDependencies.add(mapReduceTwoClientDependency);
+            setOfDependencies.add(oozieClientDependency);
+
+            return setOfDependencies;
+          }
+
+          return Collections.emptySet();
+        }
+      };
+
+    assertEquals("Initial conditional dependency map should be empty",
+      0, testStack.getDependencyConditionalServiceMap().size());
+
+    testStack.registerConditionalDependencies();
+
+    assertEquals("Set of conditional service mappings is an incorrect size",
+      4, testStack.getDependencyConditionalServiceMap().size());
+
+    assertEquals("Incorrect service dependency for HCAT",
+      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+    assertEquals("Incorrect service dependency for YARN_CLIENT",
+      "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
+    assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
+      "MAPREDUCE2", testStack.getDependencyConditionalServiceMap().get(mapReduceTwoClientDependency));
+    assertEquals("Incorrect service dependency for OOZIE_CLIENT",
+      "OOZIE", testStack.getDependencyConditionalServiceMap().get(oozieClientDependency));
+
+    mockSupport.verifyAll();
+  }
+
+
+  @Test
+  public void testStackRegisterConditionalDependenciesNoMapReduceClient() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+
+    // setup mock expectations
+    expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
+
+    // test dependencies
+    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
+    final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
+    final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
+
+    mockSupport.replayAll();
+
+    // create stack for testing
+    BaseBlueprintProcessor.Stack testStack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockMgmtController) {
+        @Override
+        public Collection<DependencyInfo> getDependenciesForComponent(String component) {
+          // simulate the dependencies in a given stack by overriding this method
+          if (component.equals("NAGIOS_SERVER")) {
+            Set<DependencyInfo> setOfDependencies = new HashSet<DependencyInfo>();
+
+            setOfDependencies.add(hCatDependency);
+            setOfDependencies.add(yarnClientDependency);
+            setOfDependencies.add(tezClientDependency);
+            setOfDependencies.add(oozieClientDependency);
+
+            return setOfDependencies;
+          }
+
+          return Collections.emptySet();
+        }
+      };
+
+    assertEquals("Initial conditional dependency map should be empty",
+      0, testStack.getDependencyConditionalServiceMap().size());
+
+    testStack.registerConditionalDependencies();
+
+    assertEquals("Set of conditional service mappings is an incorrect size",
+      4, testStack.getDependencyConditionalServiceMap().size());
+
+    assertEquals("Incorrect service dependency for HCAT",
+      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+    assertEquals("Incorrect service dependency for YARN_CLIENT",
+      "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
+    assertEquals("Incorrect service dependency for TEZ_CLIENT",
+      "TEZ", testStack.getDependencyConditionalServiceMap().get(tezClientDependency));
+    assertEquals("Incorrect service dependency for OOZIE_CLIENT",
+      "OOZIE", testStack.getDependencyConditionalServiceMap().get(oozieClientDependency));
+
+    mockSupport.verifyAll();
+  }
+
+
+  @Test
+  public void testStackRegisterConditionalDependenciesNoOozieClient() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+
+    // setup mock expectations
+    expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
+
+    // test dependencies
+    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
+    final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
+    final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
+
+    mockSupport.replayAll();
+
+    // create stack for testing
+    BaseBlueprintProcessor.Stack testStack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockMgmtController) {
+        @Override
+        public Collection<DependencyInfo> getDependenciesForComponent(String component) {
+          // simulate the dependencies in a given stack by overriding this method
+          if (component.equals("NAGIOS_SERVER")) {
+            Set<DependencyInfo> setOfDependencies = new HashSet<DependencyInfo>();
+
+            setOfDependencies.add(hCatDependency);
+            setOfDependencies.add(yarnClientDependency);
+            setOfDependencies.add(tezClientDependency);
+            setOfDependencies.add(mapReduceTwoClientDependency);
+
+            return setOfDependencies;
+          }
+
+          return Collections.emptySet();
+        }
+      };
+
+    assertEquals("Initial conditional dependency map should be empty",
+      0, testStack.getDependencyConditionalServiceMap().size());
+
+    testStack.registerConditionalDependencies();
+
+    assertEquals("Set of conditional service mappings is an incorrect size",
+      4, testStack.getDependencyConditionalServiceMap().size());
+
+    assertEquals("Incorrect service dependency for HCAT",
+      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+    assertEquals("Incorrect service dependency for YARN_CLIENT",
+      "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
+    assertEquals("Incorrect service dependency for TEZ_CLIENT",
+      "TEZ", testStack.getDependencyConditionalServiceMap().get(tezClientDependency));
+    assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
+      "MAPREDUCE2", testStack.getDependencyConditionalServiceMap().get(mapReduceTwoClientDependency));
+
+    mockSupport.verifyAll();
+  }
+
+
+  /**
+   * Convenience class for easier setup/initialization of dependencies
+   * for unit testing.
+   */
+  private static class TestDependencyInfo extends DependencyInfo {
+    TestDependencyInfo(String dependencyName) {
+      setName(dependencyName);
+    }
+  }
+}
\ No newline at end of file
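
The mechanism the tests above exercise is a small lookup: each Nagios client-component dependency is mapped to the service that owns the client, and the dependency is only enforced when that service is actually in the blueprint. Restated as a JavaScript sketch (the function name and blueprint representation are hypothetical; the component-to-service mapping is the one registered in the patch):

var conditionalServiceForComponent = {
  HCAT: 'HCATALOG',
  OOZIE_CLIENT: 'OOZIE',
  YARN_CLIENT: 'YARN',
  TEZ_CLIENT: 'TEZ',
  MAPREDUCE2_CLIENT: 'MAPREDUCE2'
};

function isDependencyRequired(componentName, blueprintServices) {
  var owningService = conditionalServiceForComponent[componentName];
  // Dependencies with no conditional mapping stay unconditional.
  return !owningService || blueprintServices.indexOf(owningService) !== -1;
}

console.log(isDependencyRequired('TEZ_CLIENT', ['HDFS', 'YARN']));  // false: TEZ not deployed
console.log(isDependencyRequired('YARN_CLIENT', ['HDFS', 'YARN'])); // true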


[27/30] git commit: AMBARI-7287. Slider View: Unable to see added config in create slider app wizard (alexantonenko)

Posted by jo...@apache.org.
AMBARI-7287. Slider View: Unable to see added config in create slider app wizard (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/85a89775
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/85a89775
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/85a89775

Branch: refs/heads/branch-alerts-dev
Commit: 85a8977582171013394604a0308575267c96cbb1
Parents: fc569f1
Author: Alex Antonenko <hi...@gmail.com>
Authored: Fri Sep 12 21:01:24 2014 +0300
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Sat Sep 13 01:30:49 2014 +0300

----------------------------------------------------------------------
 .../slider/src/main/resources/ui/app/components/configSection.js   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/85a89775/contrib/views/slider/src/main/resources/ui/app/components/configSection.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/components/configSection.js b/contrib/views/slider/src/main/resources/ui/app/components/configSection.js
index bcdff12..a1608d2 100644
--- a/contrib/views/slider/src/main/resources/ui/app/components/configSection.js
+++ b/contrib/views/slider/src/main/resources/ui/app/components/configSection.js
@@ -146,7 +146,7 @@ App.ConfigSectionComponent = Em.Component.extend({
         });
         return;
       }
-      this.get('config').pushObject({name: name, value: value, label: name});
+      this.get('config').pushObject(App.ConfigProperty.create({name: name, value: value, label: name}));
       this.cleanNewConfig();
       this.toggleProperty('buttonVisible');
     },
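
The one-line fix matters because the rest of the config table consists of Ember objects: a plain JavaScript object pushed into the same array does not notify observers, so the newly added property never rendered. A minimal sketch of the difference (App.ConfigProperty is assumed here to extend Ember.Object):

// Ember objects announce changes; plain objects mutate silently.
var wrapped = Ember.Object.create({name: 'a', value: '1', label: 'a'});
wrapped.set('value', '2');   // bound templates and observers update

var plain = {name: 'a', value: '1', label: 'a'};
plain.value = '2';           // no notification; a bound template can miss it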


[19/30] git commit: AMBARI-7281. Slider View: App summary details shown differently than Ambari service summary. (onechiporenko)

Posted by jo...@apache.org.
AMBARI-7281. Slider View: App summary details shown differently than Ambari service summary. (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1b52db2a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1b52db2a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1b52db2a

Branch: refs/heads/branch-alerts-dev
Commit: 1b52db2a467ee0600eea67907abeeb7e51cfcf7d
Parents: 2fbbfb3
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Fri Sep 12 17:26:55 2014 +0300
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Fri Sep 12 17:26:55 2014 +0300

----------------------------------------------------------------------
 .../resources/ui/app/styles/application.less    |  26 ++-
 .../ui/app/templates/slider_app/summary.hbs     | 200 +++++++++----------
 .../ui/app/views/slider_app/summary_view.js     |   4 +-
 3 files changed, 119 insertions(+), 111 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1b52db2a/contrib/views/slider/src/main/resources/ui/app/styles/application.less
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/styles/application.less b/contrib/views/slider/src/main/resources/ui/app/styles/application.less
index f2d6260..b415da0 100644
--- a/contrib/views/slider/src/main/resources/ui/app/styles/application.less
+++ b/contrib/views/slider/src/main/resources/ui/app/styles/application.less
@@ -529,19 +529,27 @@ a {
  * App Summary Page
  */
 .app_summary {
-  .container {
-    padding-left: 0;
-    .panel-summury {
-      min-height: 400px;
-      table  tr td:last-child {
-        word-break: break-word;
+  padding-left: 0;
+  table {
+    &.no-borders {
+      td {
+        border-width: 0;
       }
     }
-    .panel-link {
-      margin-top: -5px;
-      margin-right: -7px;
+  }
+  .panel-heading {
+    font-weight: 700;
+  }
+  .panel-summary {
+    min-height: 400px;
+    td:first-child {
+      text-align: right;
     }
   }
+  .panel-link {
+    margin-top: -5px;
+    margin-right: -7px;
+  }
 }
 
 .chart-container {

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b52db2a/contrib/views/slider/src/main/resources/ui/app/templates/slider_app/summary.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/templates/slider_app/summary.hbs b/contrib/views/slider/src/main/resources/ui/app/templates/slider_app/summary.hbs
index cf86dc5..5cb2cd1 100644
--- a/contrib/views/slider/src/main/resources/ui/app/templates/slider_app/summary.hbs
+++ b/contrib/views/slider/src/main/resources/ui/app/templates/slider_app/summary.hbs
@@ -16,113 +16,113 @@
 * limitations under the License.
 }}
 
-<div class="container">
-  <div class="row">
-    <div class="col-md-6">
-      {{#bs-panel heading="Summary" class="panel-summury"}}
-        <table class="table table-striped table-bordered table-condensed">
-          <tbody>
-            <tr>
-              <td>{{t common.status}}</td>
-              <td>{{controller.model.status}}</td>
-            </tr>
-            <tr>
-              <td>{{t common.type}}</td>
-              <td>{{controller.appType}}</td>
-            </tr>
-            <tr>
-              <td>{{t common.started}}</td>
-              <td>{{formatWordBreak controller.model.started}}</td>
-            </tr>
-            <tr>
-              <td>{{t common.finished}}</td>
-              <td>{{formatWordBreak controller.model.ended}}</td>
-            </tr>
-            <tr>
-              <td>{{t common.diagnostics}}</td>
-              <td>{{formatWordBreak controller.model.diagnostics devider="."}}</td>
-            </tr>
-            {{#each controller.model.jmx}}
-              <tr>
-                <td>{{humanize key}}</td>
-                <td>{{formatWordBreak value}}</td>
-              </tr>
-            {{/each}}
-          </tbody>
-        </table>
-      {{/bs-panel}}
-    </div>
-    <div class="col-md-6">
-      {{#bs-panel heading="Components" class="panel-components"}}
-          <table class="table table-striped table-bordered table-condensed">
-              <tbody>
-              {{#each controller.model.components}}
-                <tr>
-                  <td>{{componentName}}</td>
-                  <td>{{host}}</td>
-                </tr>
-              {{/each}}
-              </tbody>
-          </table>
-      {{/bs-panel}}
-      <div class="panel panel-default panel-alerts">
-        <div class="panel-heading">
-          {{t common.alerts}}
-          <div class="btn-group pull-right panel-link">
-{{!--            <a class="btn btn-default btn-sm" target="_blank" rel="tooltip"
-              {{translateAttr title="sliderApp.summary.go_to_nagios"}}
-              {{bind-attr href="view.nagiosUrl"}}>
-                <i class="icon-link"></i>
-            </a>--}}
-          </div>
-        </div>
-        <div class="app-alerts">
-            <ul>
-              {{#each controller.model.alerts}}
-                {{#view view.AlertView contentBinding="this"}}
-                      <div class="container-fluid">
-                          <div class="row">
-                              <div class="col-md-1 status-icon">
-                                  <i {{bind-attr class="iconClass :icon-large"}}></i>
-                              </div>
-                              <div class="col-md-11">
-                                  <div class="row">
-                                      <div class="col-md-7 title">{{title}}</div>
 
-                                      <div {{bs-bind-tooltip view.tooltip}} data-placement="right" class="col-md-5 date-time">{{timeSinceAlert}}</div>
-                                  </div>
-                                  <div class="message">{{message}}</div>
-                              </div>
-                          </div>
-                      </div>
-                  {{/view}}
-              {{/each}}
-            </ul>
-        </div>
-      </div>
-    </div>
+<div class="row">
+  <div class="col-md-6">
+    {{#bs-panel heading="Summary" class="panel-summary"}}
+      <table class="table no-borders table-condensed">
+        <tbody>
+        <tr>
+          <td>{{t common.status}}</td>
+          <td>{{controller.model.status}}</td>
+        </tr>
+        <tr>
+          <td>{{t common.type}}</td>
+          <td>{{controller.appType}}</td>
+        </tr>
+        <tr>
+          <td>{{t common.started}}</td>
+          <td>{{formatWordBreak controller.model.started}}</td>
+        </tr>
+        <tr>
+          <td>{{t common.finished}}</td>
+          <td>{{formatWordBreak controller.model.ended}}</td>
+        </tr>
+        <tr>
+          <td>{{t common.diagnostics}}</td>
+          <td>{{formatWordBreak controller.model.diagnostics devider="."}}</td>
+        </tr>
+          {{#each controller.model.jmx}}
+          <tr>
+            <td>{{humanize key}}</td>
+            <td>{{formatWordBreak value}}</td>
+          </tr>
+          {{/each}}
+        </tbody>
+      </table>
+    {{/bs-panel}}
   </div>
-  {{#if controller.model.showMetrics}}
-    <div class="panel panel-default">
+  <div class="col-md-6">
+    {{#bs-panel heading="Components" class="panel-components"}}
+      <table class="table no-borders table-condensed">
+        <tbody>
+          {{#each controller.model.components}}
+          <tr>
+            <td>{{componentName}}</td>
+            <td>{{host}}</td>
+          </tr>
+          {{/each}}
+        </tbody>
+      </table>
+    {{/bs-panel}}
+    <div class="panel panel-default panel-alerts">
       <div class="panel-heading">
-        {{t common.metrics}}
+        {{t common.alerts}}
         <div class="btn-group pull-right panel-link">
-          <a class="btn btn-default btn-sm" target="_blank" rel="tooltip"
-            {{translateAttr title="sliderApp.summary.go_to_ganglia"}}
-            {{bind-attr href="view.gangliaUrl"}}>
+          {{#if App.nagiosHost}}
+            <a class="btn btn-default btn-sm" target="_blank" rel="tooltip"
+              {{translateAttr title="sliderApp.summary.go_to_nagios"}}
+              {{bind-attr href="view.nagiosUrl"}}>
               <i class="icon-link"></i>
-          </a>
+            </a>
+          {{/if}}
         </div>
       </div>
-      <div class="panel-body">
-        {{#each graphs in view.graphs}}
-       <div class="row">
-         {{#each graph in graphs}}
-           <div class="col-md-3">{{view graph}}</div>
-         {{/each}}
-       </div>
-     {{/each}}
+      <div class="app-alerts">
+        <ul>
+          {{#each controller.model.alerts}}
+            {{#view view.AlertView contentBinding="this"}}
+              <div class="container-fluid">
+                <div class="row">
+                  <div class="col-md-1 status-icon">
+                    <i {{bind-attr class="iconClass :icon-large"}}></i>
+                  </div>
+                  <div class="col-md-11">
+                    <div class="row">
+                      <div class="col-md-7 title">{{title}}</div>
+                      <div {{bs-bind-tooltip view.tooltip}} data-placement="right" class="col-md-5 date-time">{{timeSinceAlert}}</div>
+                    </div>
+                    <div class="message">{{message}}</div>
+                  </div>
+                </div>
+              </div>
+            {{/view}}
+          {{/each}}
+        </ul>
       </div>
     </div>
-  {{/if}}
+  </div>
 </div>
+{{#if controller.model.showMetrics}}
+  <div class="panel panel-default">
+    <div class="panel-heading">
+      {{t common.metrics}}
+      <div class="btn-group pull-right panel-link">
+        <a class="btn btn-default btn-sm" target="_blank" rel="tooltip"
+          {{translateAttr title="sliderApp.summary.go_to_ganglia"}}
+          {{bind-attr href="view.gangliaUrl"}}>
+          <i class="icon-link"></i>
+        </a>
+      </div>
+    </div>
+    <div class="panel-body">
+      {{#each graphs in view.graphs}}
+        <div class="row">
+          {{#each graph in graphs}}
+            <div class="col-md-3">{{view graph}}</div>
+          {{/each}}
+        </div>
+      {{/each}}
+    </div>
+  </div>
+{{/if}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b52db2a/contrib/views/slider/src/main/resources/ui/app/views/slider_app/summary_view.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/views/slider_app/summary_view.js b/contrib/views/slider/src/main/resources/ui/app/views/slider_app/summary_view.js
index 1be586d..6351f77 100644
--- a/contrib/views/slider/src/main/resources/ui/app/views/slider_app/summary_view.js
+++ b/contrib/views/slider/src/main/resources/ui/app/views/slider_app/summary_view.js
@@ -29,14 +29,14 @@ App.SliderAppSummaryView = Ember.View.extend({
    */
   gangliaUrl: function () {
     return 'http://' + App.get('gangliaHost') + '/ganglia';
-  }.property(),
+  }.property('App.gangliaHost'),
 
   /**
    * @type {string}
    */
   nagiosUrl: function () {
     return 'http://' + App.get('nagiosHost') + '/nagios';
-  }.property(),
+  }.property('App.nagiosHost'),
 
   fitPanels: function () {
     var heightLeft = parseInt(this.$('.panel-summury').css('height'));
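
The summary_view.js change is the classic Ember caching fix: a computed property declared with .property() and no dependent keys is computed once and cached, so gangliaUrl and nagiosUrl could freeze as "http://null/ganglia" if read before the host was loaded. Declaring the dependent key invalidates the cache when the host arrives; a sketch, collapsed onto one object so the dependent key is local (host name is made up):

var App = Ember.Object.extend({
  gangliaHost: null,
  gangliaUrl: function () {
    return 'http://' + this.get('gangliaHost') + '/ganglia';
  }.property('gangliaHost') // without this key the first (null) result is cached forever
}).create();

App.set('gangliaHost', 'ganglia.example.com');
console.log(App.get('gangliaUrl')); // http://ganglia.example.com/ganglia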


[11/30] git commit: AMBARI-7268. Configs: need better handling of notes in config history table.(XIWANG)

Posted by jo...@apache.org.
AMBARI-7268. Configs: need better handling of notes in config history table.(XIWANG)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9bd68735
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9bd68735
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9bd68735

Branch: refs/heads/branch-alerts-dev
Commit: 9bd6873554c83c9ea3ed14adfe32206a9745fbb0
Parents: f3e76ef
Author: Xi Wang <xi...@apache.org>
Authored: Thu Sep 11 16:18:02 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Thu Sep 11 17:00:34 2014 -0700

----------------------------------------------------------------------
 .../main/dashboard/config_history_controller.js       |  2 +-
 ambari-web/app/models/service_config_version.js       |  5 ++++-
 ambari-web/app/styles/application.less                | 12 +++++++++++-
 .../templates/common/configs/config_history_flow.hbs  |  8 ++++----
 .../app/templates/main/dashboard/config_history.hbs   | 14 +++++++++++++-
 .../app/views/main/dashboard/config_history_view.js   |  8 ++++++--
 6 files changed, 39 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd68735/ambari-web/app/controllers/main/dashboard/config_history_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/dashboard/config_history_controller.js b/ambari-web/app/controllers/main/dashboard/config_history_controller.js
index 41816bc..e6d8614 100644
--- a/ambari-web/app/controllers/main/dashboard/config_history_controller.js
+++ b/ambari-web/app/controllers/main/dashboard/config_history_controller.js
@@ -46,7 +46,7 @@ App.MainConfigHistoryController = Em.ArrayController.extend(App.TableServerMixin
     associations[2] = 'configGroup';
     associations[3] = 'createTime';
     associations[4] = 'author';
-    associations[5] = 'briefNotes';
+    associations[5] = 'notes';
     return associations;
   }.property(),
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd68735/ambari-web/app/models/service_config_version.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/service_config_version.js b/ambari-web/app/models/service_config_version.js
index 1a3922c..4a65ac2 100644
--- a/ambari-web/app/models/service_config_version.js
+++ b/ambari-web/app/models/service_config_version.js
@@ -43,7 +43,10 @@ App.ServiceConfigVersion = DS.Model.extend({
     return (this.get('groupName') === 'default') ? (this.get('displayName') + ' ' + Em.I18n.t('common.default')) : this.get('groupName');
   }.property('groupName'),
   briefNotes: function () {
-    return (typeof this.get('notes') === 'string') ? this.get('notes').slice(0, 100) : "";
+    return (typeof this.get('notes') === 'string') ? this.get('notes').slice(0, 81) : "";
+  }.property('notes'),
+  moreNotesExists: function () {
+    return (typeof this.get('notes') === 'string') ?  this.get('notes').length > 80 : false;
   }.property('notes'),
   versionText: function () {
     return Em.I18n.t('dashboard.configHistory.table.version.versionText').format(this.get('version'));
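
The model now exposes both a truncated preview and a flag telling the template whether a toggle is needed: notes are previewed at roughly their first 80 characters, and the "More" link only appears when the full text is actually longer. The rule, restated standalone (sample input is made up):

function briefNotes(notes) {
  return (typeof notes === 'string') ? notes.slice(0, 81) : '';
}
function moreNotesExists(notes) {
  return (typeof notes === 'string') ? notes.length > 80 : false;
}

var note = new Array(121).join('x');  // a 120-character note
console.log(briefNotes(note).length); // 81
console.log(moreNotesExists(note));   // true -> template renders the ">> More" link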

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd68735/ambari-web/app/styles/application.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/application.less b/ambari-web/app/styles/application.less
index 31ddc76..9dc4307 100644
--- a/ambari-web/app/styles/application.less
+++ b/ambari-web/app/styles/application.less
@@ -553,6 +553,9 @@ h1 {
 
 .tooltip-inner {
   text-align: left;
+  max-width: 400px;
+  overflow-wrap: break-word;
+  white-space:pre-wrap;
 }
 
 .popover {
@@ -4944,6 +4947,9 @@ ul.inline li {
         width: 75%;
       }
     }
+    td.notes .show-more-button {
+      font-size: @default-font-size - 1;
+    }
   }
   div.page-bar div.filtered-info {
     margin-left: 17px;
@@ -5015,7 +5021,7 @@ ul.inline li {
         left: -45px;
         z-index: 1000;
         float: left;
-        min-width: 300px;
+        width: 300px;
         padding: 8px;
         list-style: none;
         background-color: #ffffff;
@@ -5031,7 +5037,9 @@ ul.inline li {
           padding: 1px 5px 15px 5px;
           text-align: left;
           .notes{
+            overflow-wrap: break-word;
             word-wrap: break-word;
+            white-space: pre-wrap;
           }
           .date{
             color: #808080;
@@ -5191,6 +5199,8 @@ ul.inline li {
         }
         .notes{
           word-wrap: break-word;
+          overflow-wrap: break-word;
+          white-space: pre-wrap;
         }
       }
       .btn {

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd68735/ambari-web/app/templates/common/configs/config_history_flow.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/configs/config_history_flow.hbs b/ambari-web/app/templates/common/configs/config_history_flow.hbs
index 4c79c57..efc452f 100644
--- a/ambari-web/app/templates/common/configs/config_history_flow.hbs
+++ b/ambari-web/app/templates/common/configs/config_history_flow.hbs
@@ -45,7 +45,7 @@
             <div class="content"> <strong>{{serviceVersion.displayName}}</strong> <span class="label label-info">{{serviceVersion.versionText}}</span> &nbsp;
               <strong>{{t services.service.config.configHistory.configGroup}}:{{serviceVersion.configGroupName}}</strong>
               <div class="date">{{serviceVersion.createdDate}}</div>
-              <div class="notes">{{serviceVersion.briefNotes}}</div>
+              <div class="notes">{{serviceVersion.notes}}</div>
             </div>
             <div class="version-operations-buttons">
                 <button {{bindAttr disabled="serviceVersion.disabledActionAttr.view" class=":btn serviceVersion.isDisplayed:not-allowed-cursor" title="serviceVersion.disabledActionMessages.view"}} {{action switchVersion serviceVersion target="view"}}><i class="icon-search"></i>&nbsp;{{t common.view}}</button>
@@ -69,7 +69,7 @@
                   <i class="icon-remove-circle icon-large"></i>
               </div>
               <div class="label-wrapper span8"
-                   data-toggle="tooltip" {{bindAttr data-original-title="view.compareServiceVersion.briefNotes"}}>
+                   data-toggle="tooltip" {{bindAttr data-original-title="view.compareServiceVersion.notes"}}>
                   {{t services.service.config.configHistory.comparing}}
                   <span class="label label-info">{{view.displayedServiceVersion.versionText}}</span>
                   ...
@@ -107,7 +107,7 @@
                                 <div class="content"> <strong>{{serviceVersion.displayName}}</strong> <span class="label label-info">{{serviceVersion.versionText}}</span> &nbsp;
                                     <strong>{{t services.service.config.configHistory.configGroup}}:{{serviceVersion.configGroupName}}</strong>
                                     <div class="date">{{serviceVersion.createdDate}}</div>
-                                    <div class="notes">{{serviceVersion.briefNotes}}</div>
+                                    <div class="notes">{{serviceVersion.notes}}</div>
                                 </div>
                                 <div class="version-operations-buttons">
                                     <button {{bindAttr disabled="serviceVersion.disabledActionAttr.view" class=":btn serviceVersion.isDisplayed:not-allowed-cursor" title="serviceVersion.disabledActionMessages.view"}} {{action switchVersion serviceVersion target="view"}}><i class="icon-search"></i>&nbsp;{{t common.view}}</button>
@@ -128,7 +128,7 @@
                   </ul>
               </div>
             {{/if}}
-              <div class="label-wrapper span8" data-toggle="tooltip" {{bindAttr data-original-title="view.displayedServiceVersion.briefNotes"}}>
+              <div class="label-wrapper span8" data-toggle="tooltip" {{bindAttr data-original-title="view.displayedServiceVersion.notes"}}>
                   <span class="label label-info">{{view.displayedServiceVersion.versionText}}</span>
                 {{#if view.displayedServiceVersion.isCurrent}}
                     <span class="label label-success">{{t common.current}}</span>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd68735/ambari-web/app/templates/main/dashboard/config_history.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/config_history.hbs b/ambari-web/app/templates/main/dashboard/config_history.hbs
index 8156e58..aa33887 100644
--- a/ambari-web/app/templates/main/dashboard/config_history.hbs
+++ b/ambari-web/app/templates/main/dashboard/config_history.hbs
@@ -57,7 +57,19 @@
             </td>
             <td>{{item.createdDate}}</td>
             <td>{{item.author}}</td>
-            <td>{{item.briefNotes}}</td>
+            <td class="notes">
+              {{#if item.moreNotesExists}}
+                {{#if view.showLessNotes}}
+                  {{item.briefNotes}}
+                  <a {{action toggleShowLessStatus target="view"}} class="show-more-button">>> More</a>
+                {{else}}
+                  {{item.notes}}
+                  <a {{action toggleShowLessStatus target="view"}} class="show-more-button"><< Less</a>
+                {{/if}}
+              {{else}}
+                {{item.briefNotes}}
+              {{/if}}
+            </td>
           {{/view}}
         {{/each}}
       {{else}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd68735/ambari-web/app/views/main/dashboard/config_history_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/config_history_view.js b/ambari-web/app/views/main/dashboard/config_history_view.js
index ea01bb6..436451e 100644
--- a/ambari-web/app/views/main/dashboard/config_history_view.js
+++ b/ambari-web/app/views/main/dashboard/config_history_view.js
@@ -115,7 +115,7 @@ App.MainConfigHistoryView = App.TableView.extend({
   }),
   notesSort: sort.fieldView.extend({
     column: 5,
-    name: 'briefNotes',
+    name: 'notes',
     displayName: Em.I18n.t('common.notes')
   }),
 
@@ -193,7 +193,11 @@ App.MainConfigHistoryView = App.TableView.extend({
 
   ConfigVersionView: Em.View.extend({
     tagName: 'tr',
-    didInsertElement: function(){
+    showLessNotes: true,
+    toggleShowLessStatus: function () {
+      this.set('showLessNotes', !this.get('showLessNotes'));
+    },
+    didInsertElement: function () {
       App.tooltip(this.$("[rel='Tooltip']"));
     }
   }),


[09/30] git commit: AMBARI-7267 ATS SPNEGO HTTP keytab should be spnego.service.keytab (alejandro)

Posted by jo...@apache.org.
AMBARI-7267 ATS SPNEGO HTTP keytab should be spnego.service.keytab (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f3e76efe
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f3e76efe
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f3e76efe

Branch: refs/heads/branch-alerts-dev
Commit: f3e76efe012b9dfea262d9504cefda85ed6ce7d8
Parents: 91a01f1
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Thu Sep 11 14:00:22 2014 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Thu Sep 11 16:56:14 2014 -0700

----------------------------------------------------------------------
 ambari-web/app/data/HDP2/secure_properties.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f3e76efe/ambari-web/app/data/HDP2/secure_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/secure_properties.js b/ambari-web/app/data/HDP2/secure_properties.js
index b67bf29..a2c3b98 100644
--- a/ambari-web/app/data/HDP2/secure_properties.js
+++ b/ambari-web/app/data/HDP2/secure_properties.js
@@ -516,10 +516,10 @@ module.exports =
     {
       "id": "puppet var",
       "name": "apptimelineserver_http_keytab",
-      "displayName": "Path to App Timeline Server HTTP keytab file",
+      "displayName": "Path to App Timeline Server SPNEGO HTTP keytab file",
       "value": "",
-      "defaultValue": "/etc/security/keytabs/yarn.service.keytab",
-      "description": "Path to App Timeline Server HTTP keytab file",
+      "defaultValue": "/etc/security/keytabs/spnego.service.keytab",
+      "description": "Path to App Timeline Server SPNEGO HTTP keytab file",
       "displayType": "directory",
       "isVisible": true,
       "isOverridable": true,


[20/30] git commit: AMBARI-7279 Config History: capacity-scheduler properties from service_config_version should be in same order as before (dsen)

Posted by jo...@apache.org.
AMBARI-7279 Config History: capacity-scheduler properties from service_config_version should be in same order as before (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ae480151
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ae480151
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ae480151

Branch: refs/heads/branch-alerts-dev
Commit: ae4801519645b960f66fd6a97a5f4819f9ee2a89
Parents: 2fbbfb3
Author: Dmytro Sen <ds...@hortonworks.com>
Authored: Fri Sep 12 17:25:15 2014 +0300
Committer: Dmytro Sen <ds...@hortonworks.com>
Committed: Fri Sep 12 17:42:14 2014 +0300

----------------------------------------------------------------------
 .../controller/internal/ServiceConfigVersionResourceProvider.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ae480151/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
index f284030..f055375 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
@@ -170,7 +170,7 @@ public class ServiceConfigVersionResourceProvider extends
       configMap.put("type", config.getType());
       configMap.put("tag", config.getVersionTag());
       configMap.put("version", config.getVersion());
-      configMap.put("properties", config.getConfigs());
+      configMap.put("properties", new TreeMap(config.getConfigs()));
       configMap.put("properties_attributes", config.getConfigAttributes());
       result.add(configMap);
     }

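The one-line fix works because TreeMap iterates its keys in sorted order, so the API now returns capacity-scheduler properties in a stable alphabetical sequence instead of HashMap's arbitrary, run-dependent one. A minimal sketch of the difference (property values are illustrative):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class ConfigOrderSketch {
      public static void main(String[] args) {
        Map<String, String> configs = new HashMap<String, String>();
        configs.put("yarn.scheduler.capacity.root.queues", "default");
        configs.put("yarn.scheduler.capacity.maximum-applications", "10000");
        configs.put("yarn.scheduler.capacity.node-locality-delay", "40");

        // HashMap iteration order depends on hashing and may change between
        // JVMs or runs, which is what reordered the displayed config history.
        System.out.println(configs);

        // TreeMap returns keys in natural (alphabetical) order every time.
        System.out.println(new TreeMap<String, String>(configs));
      }
    }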

[10/30] git commit: AMBARI-7272. Configs: make final icon more clear selected / not selected.(xiwang)

Posted by jo...@apache.org.
AMBARI-7272. Configs: make final icon more clear selected / not selected.(xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e71d4fa8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e71d4fa8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e71d4fa8

Branch: refs/heads/branch-alerts-dev
Commit: e71d4fa8facd19d0430960025ba3d49dc19737a3
Parents: 9bd6873
Author: Xi Wang <xi...@apache.org>
Authored: Thu Sep 11 16:43:58 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Thu Sep 11 17:00:34 2014 -0700

----------------------------------------------------------------------
 ambari-web/app/styles/application.less | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e71d4fa8/ambari-web/app/styles/application.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/application.less b/ambari-web/app/styles/application.less
index 9dc4307..e4b455e 100644
--- a/ambari-web/app/styles/application.less
+++ b/ambari-web/app/styles/application.less
@@ -1204,12 +1204,16 @@ h1 {
       color: rgb(243, 178, 11);
       margin-right: 2px;
     }
-    .btn-final .icon-lock {
-      color: grey;
+    .btn-final{
+      background: none repeat scroll 0 0 #fff;
+    }
+    .btn-final .icon-lock{
+      color: #a6a6a6;
     }
     .btn-final.active .icon-lock {
       color: blue;
     }
+    .btn-final.active,
     .btn-final.active[disabled] { //copied from Bootstrap .btn.active
       background-color: #e6e6e6;
       background-color: #d9d9d9 \9;


[25/30] git commit: AMBARI-7227 - Views : Extract System view on ambari-server setup

Posted by jo...@apache.org.
AMBARI-7227 - Views : Extract System view on ambari-server setup


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/093ed17d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/093ed17d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/093ed17d

Branch: refs/heads/branch-alerts-dev
Commit: 093ed17d46e6b3e2e1528e27e4cbd42e18a9d610
Parents: bd28cd9
Author: tbeerbower <tb...@hortonworks.com>
Authored: Thu Sep 11 16:38:57 2014 -0400
Committer: tbeerbower <tb...@hortonworks.com>
Committed: Fri Sep 12 14:59:48 2014 -0400

----------------------------------------------------------------------
 .../ambari/server/view/ViewArchiveUtility.java  | 120 ++++++
 .../ambari/server/view/ViewExtractor.java       | 223 ++++++++++
 .../apache/ambari/server/view/ViewRegistry.java | 429 ++++++-------------
 ambari-server/src/main/python/ambari-server.py  |  29 ++
 .../AmbariPrivilegeResourceProviderTest.java    |   2 +-
 .../ViewPrivilegeResourceProviderTest.java      |   2 +-
 .../ambari/server/view/ViewExtractorTest.java   | 262 +++++++++++
 .../ambari/server/view/ViewRegistryTest.java    | 105 +++--
 .../src/test/python/TestAmbariServer.py         |   8 +-
 9 files changed, 834 insertions(+), 346 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/093ed17d/ambari-server/src/main/java/org/apache/ambari/server/view/ViewArchiveUtility.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewArchiveUtility.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewArchiveUtility.java
new file mode 100644
index 0000000..f5f2732
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewArchiveUtility.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.view;
+
+import org.apache.ambari.server.view.configuration.ViewConfig;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.jar.JarFile;
+
+/**
+ * Helper class for basic view archive utility.
+ */
+public class ViewArchiveUtility {
+
+  /**
+   * Constants
+   */
+  private static final String VIEW_XML = "view.xml";
+
+
+  // ----- ViewArchiveUtility ------------------------------------------------
+
+  /**
+   * Get the view configuration from the given archive file.
+   *
+   * @param archiveFile  the archive file
+   *
+   * @return the associated view configuration
+   */
+  public ViewConfig getViewConfigFromArchive(File archiveFile)
+      throws MalformedURLException, JAXBException {
+    ClassLoader cl = URLClassLoader.newInstance(new URL[]{archiveFile.toURI().toURL()});
+
+    InputStream configStream      = cl.getResourceAsStream(VIEW_XML);
+    JAXBContext jaxbContext       = JAXBContext.newInstance(ViewConfig.class);
+    Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
+
+    return (ViewConfig) jaxbUnmarshaller.unmarshal(configStream);
+  }
+
+  /**
+   * Get the view configuration from the extracted archive file.
+   *
+   * @param archivePath path to extracted archive
+   *
+   * @return the associated view configuration
+   *
+   * @throws JAXBException if xml is malformed
+   * @throws java.io.FileNotFoundException if xml was not found
+   */
+  public ViewConfig getViewConfigFromExtractedArchive(String archivePath)
+      throws JAXBException, FileNotFoundException {
+
+    InputStream configStream      = new FileInputStream(new File(archivePath + File.separator + VIEW_XML));
+    JAXBContext  jaxbContext      = JAXBContext.newInstance(ViewConfig.class);
+    Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
+
+    return (ViewConfig) jaxbUnmarshaller.unmarshal(configStream);
+  }
+
+  /**
+   * Get a new file instance for the given path.
+   *
+   * @param path  the path
+   *
+   * @return a new file instance
+   */
+  public File getFile(String path) {
+    return new File(path);
+  }
+
+  /**
+   * Get a new file output stream for the given file.
+   *
+   * @param file  the file
+   *
+   * @return a new file output stream
+   */
+  public FileOutputStream getFileOutputStream(File file) throws FileNotFoundException {
+    return new FileOutputStream(file);
+  }
+
+  /**
+   * Get a new jar file instance from the given file.
+   *
+   * @param file  the file
+   *
+   * @return a new jar file instance
+   */
+  public JarFile getJarFile(File file) throws IOException {
+    return new JarFile(file);
+  }
+}

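A rough usage sketch of the new utility (paths and view name below are hypothetical): getViewConfigFromArchive reads view.xml straight out of the packaged jar via JAXB, while getViewConfigFromExtractedArchive expects the exploded work directory.

    import java.io.File;

    import org.apache.ambari.server.view.ViewArchiveUtility;
    import org.apache.ambari.server.view.configuration.ViewConfig;

    public class ArchiveUtilitySketch {
      public static void main(String[] args) throws Exception {
        ViewArchiveUtility util = new ViewArchiveUtility();

        // Read view.xml directly from the packaged archive on disk.
        ViewConfig packaged = util.getViewConfigFromArchive(
            new File("/var/lib/ambari-server/resources/views/files-0.1.0.jar"));

        // Read view.xml from a previously extracted work directory.
        ViewConfig extracted = util.getViewConfigFromExtractedArchive(
            "/var/lib/ambari-server/resources/views/work/FILES{0.1.0}");

        System.out.println(packaged.getName() + " / " + extracted.getVersion());
      }
    }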
http://git-wip-us.apache.org/repos/asf/ambari/blob/093ed17d/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
new file mode 100644
index 0000000..368e92c
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
@@ -0,0 +1,223 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.view;
+
+import org.apache.ambari.server.orm.entities.ViewEntity;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.Enumeration;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+
+/**
+ * Extractor for view archives.
+ */
+public class ViewExtractor {
+
+  /**
+   * Constants
+   */
+  private static final String ARCHIVE_CLASSES_DIR = "WEB-INF/classes";
+  private static final String ARCHIVE_LIB_DIR     = "WEB-INF/lib";
+
+  @Inject
+  ViewArchiveUtility archiveUtility;
+
+  /**
+   * The logger.
+   */
+  protected final static Logger LOG = LoggerFactory.getLogger(ViewExtractor.class);
+
+
+  // ----- ViewExtractor -----------------------------------------------------
+
+  /**
+   * Extract the given view archive to the given archive directory.
+   *
+   * @param view         the view entity
+   * @param viewArchive  the view archive file
+   * @param archiveDir   the view archive directory
+   *
+   * @return the class loader for the archive classes
+   *
+   * @throws ExtractionException if the archive can not be extracted
+   */
+  public ClassLoader extractViewArchive(ViewEntity view, File viewArchive, File archiveDir)
+      throws ExtractionException {
+
+    String archivePath = archiveDir.getAbsolutePath();
+
+    try {
+      // Skip if the archive has already been extracted
+      if (!archiveDir.exists()) {
+
+        String msg = "Creating archive folder " + archivePath + ".";
+
+        view.setStatusDetail(msg);
+        LOG.info(msg);
+
+        if (archiveDir.mkdir()) {
+          JarFile viewJarFile = archiveUtility.getJarFile(viewArchive);
+          Enumeration enumeration = viewJarFile.entries();
+
+          msg = "Extracting files from " + viewArchive.getName() + ".";
+
+          view.setStatusDetail(msg);
+          LOG.info(msg);
+
+          while (enumeration.hasMoreElements()) {
+            JarEntry jarEntry  = (JarEntry) enumeration.nextElement();
+            String   entryPath = archivePath + File.separator + jarEntry.getName();
+
+            File entryFile = archiveUtility.getFile(entryPath);
+
+            if (jarEntry.isDirectory()) {
+              if (!entryFile.mkdir()) {
+                msg = "Could not create archive entry directory " + entryPath + ".";
+
+                view.setStatusDetail(msg);
+                LOG.error(msg);
+                throw new ExtractionException(msg);
+              }
+            } else {
+              InputStream is = viewJarFile.getInputStream(jarEntry);
+              try {
+                FileOutputStream fos = archiveUtility.getFileOutputStream(entryFile);
+                try {
+                  while (is.available() > 0) {
+                    fos.write(is.read());
+                  }
+                } finally {
+                  fos.close();
+                }
+              } finally {
+                is.close();
+              }
+            }
+          }
+        } else {
+          msg = "Could not create archive directory " + archivePath + ".";
+
+          view.setStatusDetail(msg);
+          LOG.error(msg);
+          throw new ExtractionException(msg);
+        }
+      }
+      return getArchiveClassLoader(archiveDir);
+
+    } catch (Exception e) {
+      String msg = "Caught exception trying to extract the view archive " + archivePath + ".";
+
+      view.setStatusDetail(msg);
+      LOG.error(msg, e);
+      throw new ExtractionException(msg, e);
+    }
+  }
+
+  /**
+   * Ensure that the extracted view archive directory exists.
+   *
+   * @param extractedArchivesPath  the path
+   *
+   * @return false if the directory does not exist and can not be created
+   */
+  public boolean ensureExtractedArchiveDirectory(String extractedArchivesPath) {
+
+    File extractedArchiveDir = archiveUtility.getFile(extractedArchivesPath);
+
+    return extractedArchiveDir.exists() || extractedArchiveDir.mkdir();
+  }
+
+
+  // ----- helper methods ----------------------------------------------------
+
+  // get a class loader for the given archive directory
+  private ClassLoader getArchiveClassLoader(File archiveDir)
+      throws MalformedURLException {
+
+    String    archivePath = archiveDir.getAbsolutePath();
+    List<URL> urlList     = new LinkedList<URL>();
+
+    // include the classes directory
+    String classesPath = archivePath + File.separator + ARCHIVE_CLASSES_DIR;
+    File   classesDir  = archiveUtility.getFile(classesPath);
+    if (classesDir.exists()) {
+      urlList.add(classesDir.toURI().toURL());
+    }
+
+    // include any libraries in the lib directory
+    String libPath = archivePath + File.separator + ARCHIVE_LIB_DIR;
+    File   libDir  = archiveUtility.getFile(libPath);
+    if (libDir.exists()) {
+      File[] files = libDir.listFiles();
+      if (files != null) {
+        for (final File fileEntry : files) {
+          if (!fileEntry.isDirectory()) {
+            urlList.add(fileEntry.toURI().toURL());
+          }
+        }
+      }
+    }
+
+    // include the archive directory
+    urlList.add(archiveDir.toURI().toURL());
+
+    return URLClassLoader.newInstance(urlList.toArray(new URL[urlList.size()]));
+  }
+
+
+  // ----- inner class : ExtractionException ---------------------------------
+
+  /**
+   * General exception for view archive extraction.
+   */
+  public static class ExtractionException extends Exception {
+
+    // ----- Constructors ----------------------------------------------------
+
+    /**
+     * Construct an extraction exception.
+     *
+     * @param msg  the exception message
+     */
+    public ExtractionException(String msg) {
+      super(msg);
+    }
+
+    /**
+     * Construct an extraction exception.
+     *
+     * @param msg        the exception message
+     * @param throwable  the root cause
+     */
+    public ExtractionException(String msg, Throwable throwable) {
+      super(msg, throwable);
+    }
+  }
+}

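One detail worth noting in extractViewArchive: the entry copy loop writes a single byte per iteration and uses InputStream.available() as its end-of-data test, which works for JarFile entry streams but is slow and, for streams in general, not a reliable EOF check. A buffered equivalent, sketched here outside the class, would be:

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    public final class StreamCopySketch {
      // Copy until read() signals EOF (-1) instead of polling available(),
      // moving data in 8 KB chunks rather than one byte at a time.
      public static void copy(InputStream is, OutputStream os) throws IOException {
        byte[] buffer = new byte[8192];
        int read;
        while ((read = is.read(buffer)) != -1) {
          os.write(buffer, 0, read);
        }
      }
    }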
http://git-wip-us.apache.org/repos/asf/ambari/blob/093ed17d/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
index 509e474..1c6c792 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
@@ -18,46 +18,17 @@
 
 package org.apache.ambari.server.view;
 
-import java.beans.IntrospectionException;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.jar.JarEntry;
-import java.util.jar.JarFile;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.JAXBException;
-import javax.xml.bind.Unmarshaller;
-
 import com.google.common.collect.Sets;
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
 import org.apache.ambari.server.api.resources.ResourceInstanceFactoryImpl;
 import org.apache.ambari.server.api.resources.SubResourceDefinition;
 import org.apache.ambari.server.api.resources.ViewExternalSubResourceDefinition;
 import org.apache.ambari.server.api.services.ViewExternalSubResourceService;
 import org.apache.ambari.server.api.services.ViewSubResourceService;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.ControllerModule;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.orm.dao.MemberDAO;
 import org.apache.ambari.server.orm.dao.PrivilegeDAO;
@@ -103,9 +74,22 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.security.core.GrantedAuthority;
 
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
+import javax.inject.Inject;
+import javax.inject.Singleton;
+import java.beans.IntrospectionException;
+import java.io.File;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 
 /**
  * Registry for view and view instance definitions.
@@ -116,9 +100,6 @@ public class ViewRegistry {
   /**
    * Constants
    */
-  private static final String VIEW_XML = "view.xml";
-  private static final String ARCHIVE_CLASSES_DIR = "WEB-INF/classes";
-  private static final String ARCHIVE_LIB_DIR = "WEB-INF/lib";
   private static final String EXTRACTED_ARCHIVES_DIR = "work";
 
   /**
@@ -150,11 +131,6 @@ public class ViewRegistry {
       new ConcurrentHashMap<String, Set<Listener>>();
 
   /**
-   * Helper class.
-   */
-  private ViewRegistryHelper helper = new ViewRegistryHelper();
-
-  /**
    * The singleton view registry instance.
    */
   private static ViewRegistry singleton;
@@ -224,10 +200,37 @@ public class ViewRegistry {
   @Inject
   ViewInstanceHandlerList handlerList;
 
+  /**
+   * The view extractor.
+   */
+  @Inject
+  ViewExtractor extractor;
+
+  /**
+   * The view archive utility.
+   */
+  @Inject
+  ViewArchiveUtility archiveUtility;
+
 
   // ----- ViewRegistry ------------------------------------------------------
 
   /**
+   * Registry main method.
+   *
+   * @param args  the command line arguments
+   *
+   * @throws Exception if the registry command can not be completed
+   */
+  public static void main(String[] args) throws Exception {
+
+    Injector injector = Guice.createInjector(new ControllerModule());
+    initInstance(injector.getInstance(ViewRegistry.class));
+
+    singleton.readViewArchives(true, false);
+  }
+
+  /**
    * Get the collection of all the view definitions.
    *
    * @return the collection of view definitions
@@ -396,56 +399,10 @@ public class ViewRegistry {
   }
 
   /**
-   * Asynchronously read the view archives.
+   * Read the view archives.
    */
   public void readViewArchives() {
-
-    final ExecutorService executorService = getExecutorService(configuration);
-
-    // submit a task to manage the extraction tasks
-    executorService.submit(new Runnable() {
-      @Override
-      public void run() {
-
-        try {
-          File viewDir = configuration.getViewsDir();
-
-          String extractedArchivesPath = viewDir.getAbsolutePath() +
-              File.separator + EXTRACTED_ARCHIVES_DIR;
-
-          if (ensureExtractedArchiveDirectory(extractedArchivesPath)) {
-            File[] files = viewDir.listFiles();
-
-            if (files != null) {
-              for (final File archiveFile : files) {
-                if (!archiveFile.isDirectory()) {
-
-                  final ViewConfig viewConfig = helper.getViewConfigFromArchive(archiveFile);
-
-                  String commonName = viewConfig.getName();
-                  String version    = viewConfig.getVersion();
-                  String viewName   = ViewEntity.getViewName(commonName, version);
-
-                  final String     archivePath    = extractedArchivesPath + File.separator + viewName;
-                  final ViewEntity viewDefinition = new ViewEntity(viewConfig, configuration, archivePath);
-
-                  // submit a new task for each archive being read
-                  executorService.submit(new Runnable() {
-                    @Override
-                    public void run() {
-                      readViewArchive(viewDefinition, archiveFile, archivePath, viewConfig);
-                    }
-                  });
-                }
-              }
-              removeUndeployedViews();
-            }
-          }
-        } catch (Exception e) {
-          LOG.error("Caught exception reading view archives.", e);
-        }
-      }
-    });
+    readViewArchives(false, true);
   }
 
   /**
@@ -757,15 +714,6 @@ public class ViewRegistry {
     listeners.clear();
   }
 
-  /**
-   * Set the helper.
-   *
-   * @param helper  the helper
-   */
-  protected void setHelper(ViewRegistryHelper helper) {
-    this.helper = helper;
-  }
-
   // get a view entity for the given internal view name
   private ViewEntity getDefinition(String viewName) {
     return viewDefinitions.get(viewName);
@@ -1115,110 +1063,6 @@ public class ViewRegistry {
     return resourceEntity;
   }
 
-  // ensure that the extracted view archive directory exists
-  private boolean ensureExtractedArchiveDirectory(String extractedArchivesPath) {
-    File extractedArchiveDir = helper.getFile(extractedArchivesPath);
-
-    if (!extractedArchiveDir.exists()) {
-      if (!extractedArchiveDir.mkdir()) {
-        LOG.error("Could not create extracted view archive directory " +
-            extractedArchivesPath + ".");
-        return false;
-      }
-    }
-    return true;
-  }
-
-  // extract the given view archive to the given archive directory
-  private ClassLoader extractViewArchive(ViewEntity viewDefinition, File viewArchive, File archiveDir)
-      throws IOException {
-
-    // Skip if the archive has already been extracted
-    if (!archiveDir.exists()) {
-
-      String archivePath = archiveDir.getAbsolutePath();
-
-      String msg = "Creating archive folder " + archivePath + ".";
-      LOG.info(msg);
-      setViewStatus(viewDefinition, ViewDefinition.ViewStatus.LOADING, msg);
-
-      if (archiveDir.mkdir()) {
-        JarFile     viewJarFile = helper.getJarFile(viewArchive);
-        Enumeration enumeration = viewJarFile.entries();
-
-        msg = "Extracting files from " + viewArchive.getName() + ":";
-        LOG.info(msg);
-        setViewStatus(viewDefinition, ViewDefinition.ViewStatus.LOADING, msg);
-
-        while (enumeration.hasMoreElements()) {
-          JarEntry jarEntry  = (JarEntry) enumeration.nextElement();
-          String   entryPath = archivePath + File.separator + jarEntry.getName();
-
-          LOG.info("    " + entryPath);
-
-          File entryFile = helper.getFile(entryPath);
-
-          if (jarEntry.isDirectory()) {
-            if (!entryFile.mkdir()) {
-              LOG.error("Could not create archive entry directory " + entryPath + ".");
-            }
-          } else {
-            InputStream is = viewJarFile.getInputStream(jarEntry);
-            try {
-              FileOutputStream fos = helper.getFileOutputStream(entryFile);
-              try {
-                while (is.available() > 0) {
-                  fos.write(is.read());
-                }
-              } finally {
-                fos.close();
-              }
-            } finally {
-              is.close();
-            }
-          }
-        }
-      } else {
-        LOG.error("Could not create archive directory " + archivePath + ".");
-      }
-    }
-    return getArchiveClassLoader(archiveDir);
-  }
-
-  // get a class loader for the given archive directory
-  private ClassLoader getArchiveClassLoader(File archiveDir)
-      throws MalformedURLException {
-
-    String    archivePath = archiveDir.getAbsolutePath();
-    List<URL> urlList     = new LinkedList<URL>();
-
-    // include the classes directory
-    String classesPath = archivePath + File.separator + ARCHIVE_CLASSES_DIR;
-    File   classesDir  = helper.getFile(classesPath);
-    if (classesDir.exists()) {
-      urlList.add(classesDir.toURI().toURL());
-    }
-
-    // include any libraries in the lib directory
-    String libPath = archivePath + File.separator + ARCHIVE_LIB_DIR;
-    File   libDir  = helper.getFile(libPath);
-    if (libDir.exists()) {
-      File[] files = libDir.listFiles();
-      if (files != null) {
-        for (final File fileEntry : files) {
-          if (!fileEntry.isDirectory()) {
-            urlList.add(fileEntry.toURI().toURL());
-          }
-        }
-      }
-    }
-
-    // include the archive directory
-    urlList.add(archiveDir.toURI().toURL());
-
-    return URLClassLoader.newInstance(urlList.toArray(new URL[urlList.size()]));
-  }
-
   // notify the view identified by the given view name of the given event
   private void fireEvent(Event event, String viewName) {
     Set<Listener> listeners = this.listeners.get(viewName);
@@ -1265,22 +1109,94 @@ public class ViewRegistry {
     }
   }
 
-  // read a view archive and return the set of new view instances
+
+  // read the view archives.
+  private void readViewArchives(boolean systemOnly, boolean useExecutor) {
+    try {
+      File viewDir = configuration.getViewsDir();
+
+      String extractedArchivesPath = viewDir.getAbsolutePath() +
+          File.separator + EXTRACTED_ARCHIVES_DIR;
+
+      if (extractor.ensureExtractedArchiveDirectory(extractedArchivesPath)) {
+
+        File[] files  = viewDir.listFiles();
+
+        if (files != null) {
+
+          Set<Runnable> extractionRunnables = new HashSet<Runnable>();
+
+          for (final File archiveFile : files) {
+            if (!archiveFile.isDirectory()) {
+
+              final ViewConfig viewConfig = archiveUtility.getViewConfigFromArchive(archiveFile);
+
+              String commonName = viewConfig.getName();
+              String version    = viewConfig.getVersion();
+              String viewName   = ViewEntity.getViewName(commonName, version);
+
+              final String extractedArchiveDirPath = extractedArchivesPath + File.separator + viewName;
+              final File extractedArchiveDirFile = archiveUtility.getFile(extractedArchiveDirPath);
+
+              final ViewEntity viewDefinition = new ViewEntity(viewConfig, configuration, extractedArchiveDirPath);
+
+              boolean systemView = viewDefinition.isSystem();
+
+              if (!systemOnly || systemView) {
+                // update the registry with the view
+                addDefinition(viewDefinition);
+
+                // always load system views up front
+                if (systemView || !useExecutor || extractedArchiveDirFile.exists()) {
+                  // if the archive is already extracted then load the view now
+                  readViewArchive(viewDefinition, archiveFile, extractedArchiveDirFile, viewConfig);
+                } else {
+                  // if the archive needs to be extracted then create a runnable to do it
+                  extractionRunnables.add(new Runnable() {
+                    @Override
+                    public void run() {
+                      readViewArchive(viewDefinition, archiveFile, extractedArchiveDirFile, viewConfig);
+                    }
+                  });
+                }
+              }
+            }
+          }
+
+          if (useExecutor && extractionRunnables.size() > 0) {
+            final ExecutorService executorService = getExecutorService(configuration);
+
+            for (Runnable runnable : extractionRunnables) {
+              // submit a new task for each archive that needs extraction
+              executorService.submit(runnable);
+            }
+          }
+
+          removeUndeployedViews();
+        }
+      } else {
+        LOG.error("Could not create extracted view archive directory " + extractedArchivesPath + ".");
+      }
+    } catch (Exception e) {
+      LOG.error("Caught exception reading view archives.", e);
+    }
+  }
+
+  // read a view archive
   private void readViewArchive(ViewEntity viewDefinition,
                                                   File archiveFile,
-                                                  String archivePath,
+                                                  File extractedArchiveDirFile,
                                                   ViewConfig viewConfig) {
 
-    setViewStatus(viewDefinition, ViewEntity.ViewStatus.LOADING, "Loading " + archivePath + ".");
+    setViewStatus(viewDefinition, ViewEntity.ViewStatus.LOADING, "Loading " + extractedArchiveDirFile + ".");
 
-    try {
-      // update the registry with the view
-      addDefinition(viewDefinition);
+    String extractedArchiveDirPath = extractedArchiveDirFile.getAbsolutePath();
 
+    try {
       // extract the archive and get the class loader
-      ClassLoader cl = extractViewArchive(viewDefinition, archiveFile, helper.getFile(archivePath));
+      ClassLoader cl = extractor.extractViewArchive(viewDefinition, archiveFile, extractedArchiveDirFile);
 
-      viewConfig = helper.getViewConfigFromExtractedArchive(archivePath);
+      viewConfig = archiveUtility.getViewConfigFromExtractedArchive(extractedArchiveDirPath);
 
       setupViewDefinition(viewDefinition, viewConfig, cl);
 
@@ -1301,12 +1217,12 @@ public class ViewRegistry {
         addInstanceDefinition(viewDefinition, instanceEntity);
         handlerList.addViewInstance(instanceEntity);
       }
-      setViewStatus(viewDefinition, ViewEntity.ViewStatus.LOADED, "Loaded " + archivePath + ".");
+      setViewStatus(viewDefinition, ViewEntity.ViewStatus.LOADED, "Loaded " + extractedArchiveDirPath + ".");
 
     } catch (Exception e) {
-      String msg = "Caught exception loading view " + viewDefinition.getViewName() + " : " + e.getMessage();
+      String msg = "Caught exception loading view " + viewDefinition.getViewName();
 
-      setViewStatus(viewDefinition, ViewEntity.ViewStatus.ERROR, msg);
+      setViewStatus(viewDefinition, ViewEntity.ViewStatus.ERROR, msg + " : " + e.getMessage());
       LOG.error(msg, e);
     }
   }
@@ -1334,85 +1250,4 @@ public class ViewRegistry {
     }
     return executorService;
   }
-
-
-
-  // ----- inner class : ViewRegistryHelper ----------------------------------
-
-  /**
-   * Registry helper class.
-   */
-  protected static class ViewRegistryHelper {
-
-    /**
-     * Get the view configuration from the given archive file.
-     *
-     * @param archiveFile  the archive file
-     *
-     * @return the associated view configuration
-     */
-    public ViewConfig getViewConfigFromArchive(File archiveFile)
-        throws MalformedURLException, JAXBException {
-      ClassLoader cl = URLClassLoader.newInstance(new URL[]{archiveFile.toURI().toURL()});
-
-      InputStream  configStream     = cl.getResourceAsStream(VIEW_XML);
-      JAXBContext  jaxbContext      = JAXBContext.newInstance(ViewConfig.class);
-      Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
-
-      return (ViewConfig) jaxbUnmarshaller.unmarshal(configStream);
-    }
-
-    /**
-     * Get the view configuration from the extracted archive file.
-     *
-     * @param archivePath path to extracted archive
-     *
-     * @return the associated view configuration
-     *
-     * @throws JAXBException if xml is malformed
-     * @throws FileNotFoundException if xml was not found
-     */
-    public ViewConfig getViewConfigFromExtractedArchive(String archivePath)
-        throws JAXBException, FileNotFoundException {
-
-      InputStream configStream      = new FileInputStream(new File(archivePath + File.separator + VIEW_XML));
-      JAXBContext  jaxbContext      = JAXBContext.newInstance(ViewConfig.class);
-      Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
-
-      return (ViewConfig) jaxbUnmarshaller.unmarshal(configStream);
-    }
-
-    /**
-     * Get a new file instance for the given path.
-     *
-     * @param path  the path
-     *
-     * @return a new file instance
-     */
-    public File getFile(String path) {
-      return new File(path);
-    }
-
-    /**
-     * Get a new file output stream for the given file.
-     *
-     * @param file  the file
-     *
-     * @return a new file output stream
-     */
-    public FileOutputStream getFileOutputStream(File file) throws FileNotFoundException {
-      return new FileOutputStream(file);
-    }
-
-    /**
-     * Get a new jar file instance from the given file.
-     *
-     * @param file  the file
-     *
-     * @return a new jar file instance
-     */
-    public JarFile getJarFile(File file) throws IOException {
-      return new JarFile(file);
-    }
-  }
 }

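The net effect of the ViewRegistry refactoring is a single readViewArchives(systemOnly, useExecutor) shared by two entry points: the new main(), run during ambari-server setup, loads only system views and extracts them inline, while normal server startup still loads every archive and defers extraction of not-yet-extracted views to the thread pool. A sketch of the two public call paths (neither does anything useful outside a configured server environment):

    import org.apache.ambari.server.view.ViewRegistry;

    public class ViewLoadSketch {
      public static void main(String[] args) throws Exception {
        // Setup-time path: bootstraps the registry via Guice and calls
        // readViewArchives(true, false) -- system views only, synchronous.
        ViewRegistry.main(new String[0]);

        // Runtime path: delegates to readViewArchives(false, true) -- all
        // views, with un-extracted archives handed to the executor service.
        ViewRegistry.getInstance().readViewArchives();
      }
    }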
http://git-wip-us.apache.org/repos/asf/ambari/blob/093ed17d/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index 50420b2..319d1d4 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -185,6 +185,14 @@ STACK_UPGRADE_HELPER_CMD = "{0}" + os.sep + "bin" + os.sep + "java -cp {1}" +\
                           os.pathsep + "{2} " +\
                           "org.apache.ambari.server.upgrade.StackUpgradeHelper" +\
                           " {3} {4} > " + SERVER_OUT_FILE + " 2>&1"
+
+
+VIEW_EXTRACT_CMD = "{0}" + os.sep + "bin" + os.sep + "java -cp {1}" +\
+                          os.pathsep + "{2} " +\
+                          "org.apache.ambari.server.view.ViewRegistry " +\
+                          "> " + SERVER_OUT_FILE + " 2>&1"
+
+
 ULIMIT_CMD = "ulimit -n"
 SERVER_INIT_TIMEOUT = 5
 SERVER_START_TIMEOUT = 10
@@ -1215,6 +1223,21 @@ def prompt_db_properties(args):
     password=args.database_password
   ))
 
+# extract the system views
+def extract_views():
+  jdk_path = find_jdk()
+  if jdk_path is None:
+    print_error_msg("No JDK found, please run the \"setup\" "
+                    "command to install a JDK automatically or install any "
+                    "JDK manually to " + JDK_INSTALL_DIR)
+    return 1
+
+  command = VIEW_EXTRACT_CMD.format(jdk_path, get_conf_dir(),
+    get_ambari_classpath())
+  (retcode, stdout, stderr) = run_os_command(command)
+  print_info_msg("Return code from view extraction: " +
+                 str(retcode))
+  return retcode
 
 # Store set of properties for remote database connection
 def store_remote_properties(args):
@@ -2258,6 +2281,12 @@ def setup(args):
       raise FatalException(retcode, err)
     check_jdbc_drivers(args)
 
+  print 'Extracting system views...'
+  retcode = extract_views()
+  if not retcode == 0:
+    err = 'Error while extracting system views. Exiting'
+    raise FatalException(retcode, err)
+
 
 def proceedJDBCProperties(args):
   if not os.path.isfile(args.jdbc_driver):

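With the VIEW_EXTRACT_CMD format string filled in, setup ends up shelling out to something like the following; the JDK, classpath, and log locations here are illustrative, not fixed:

    /usr/jdk64/jdk1.7.0_45/bin/java -cp /etc/ambari-server/conf:/usr/lib/ambari-server/* org.apache.ambari.server.view.ViewRegistry > /var/log/ambari-server/ambari-server.out 2>&1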
http://git-wip-us.apache.org/repos/asf/ambari/blob/093ed17d/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java
index f6a49cf..fa342fe 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java
@@ -98,7 +98,7 @@ public class AmbariPrivilegeResourceProviderTest {
   @Before
   public void resetGlobalMocks() {
     ViewRegistry.initInstance(ViewRegistryTest.getRegistry(viewDAO, viewInstanceDAO, userDAO,
-        memberDAO, privilegeDAO, resourceDAO, resourceTypeDAO, securityHelper, handlerList));
+        memberDAO, privilegeDAO, resourceDAO, resourceTypeDAO, securityHelper, handlerList, null, null));
     reset(privilegeDAO, userDAO, groupDAO, principalDAO, permissionDAO, resourceDAO, clusterDAO, handlerList);
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/093ed17d/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java
index eb4ef9c..0e9d3d6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java
@@ -90,7 +90,7 @@ public class ViewPrivilegeResourceProviderTest {
   public void resetGlobalMocks() {
 
     ViewRegistry.initInstance(ViewRegistryTest.getRegistry(viewDAO, viewInstanceDAO, userDAO,
-        memberDAO, privilegeDAO, resourceDAO, resourceTypeDAO, securityHelper, handlerList));
+        memberDAO, privilegeDAO, resourceDAO, resourceTypeDAO, securityHelper, handlerList, null, null));
     reset(privilegeDAO, userDAO, groupDAO, principalDAO, permissionDAO, resourceDAO, handlerList);
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/093ed17d/ambari-server/src/test/java/org/apache/ambari/server/view/ViewExtractorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewExtractorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewExtractorTest.java
new file mode 100644
index 0000000..1b71c37
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewExtractorTest.java
@@ -0,0 +1,262 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.view;
+
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.orm.dao.ViewDAO;
+import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
+import org.apache.ambari.server.orm.entities.ViewEntity;
+import org.apache.ambari.server.orm.entities.ViewEntityTest;
+import org.apache.ambari.server.view.configuration.ViewConfig;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import javax.xml.bind.JAXBException;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+
+/**
+ * ViewExtractor tests.
+ */
+public class ViewExtractorTest {
+
+  private static final File extractedArchiveDir = createNiceMock(File.class);
+  private static final File viewArchive = createNiceMock(File.class);
+  private static final File archiveDir = createNiceMock(File.class);
+  private static final File entryFile  = createNiceMock(File.class);
+  private static final File classesDir = createNiceMock(File.class);
+  private static final File libDir = createNiceMock(File.class);
+  private static final JarFile viewJarFile = createNiceMock(JarFile.class);
+  private static final JarEntry jarEntry = createNiceMock(JarEntry.class);
+  private static final InputStream is = createMock(InputStream.class);
+  private static final FileOutputStream fos = createMock(FileOutputStream.class);
+  private static final Configuration configuration = createNiceMock(Configuration.class);
+  private static final File viewDir = createNiceMock(File.class);
+  private static final Enumeration<JarEntry> enumeration = createMock(Enumeration.class);
+  private static final File fileEntry = createNiceMock(File.class);
+  private static final ViewDAO viewDAO = createMock(ViewDAO.class);
+
+  @Before
+  public void resetGlobalMocks() {
+    reset(extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir, libDir, viewJarFile,
+        jarEntry, is, fos, configuration, viewDir, enumeration, fileEntry, viewDAO);
+  }
+
+  @Test
+  public void testExtractViewArchive() throws Exception {
+
+    ResourceTypeEntity resourceTypeEntity = new ResourceTypeEntity();
+    resourceTypeEntity.setId(10);
+    resourceTypeEntity.setName("MY_VIEW{1.0.0}");
+
+    ViewEntity viewDefinition = ViewEntityTest.getViewEntity();
+    viewDefinition.setResourceType(resourceTypeEntity);
+
+    // set expectations
+    expect(configuration.getViewExtractionThreadPoolCoreSize()).andReturn(2).anyTimes();
+    expect(configuration.getViewExtractionThreadPoolMaxSize()).andReturn(3).anyTimes();
+    expect(configuration.getViewExtractionThreadPoolTimeout()).andReturn(10000L).anyTimes();
+
+    expect(viewArchive.getAbsolutePath()).andReturn(
+        "/var/lib/ambari-server/resources/views/work/MY_VIEW{1.0.0}").anyTimes();
+
+    expect(archiveDir.exists()).andReturn(false);
+    expect(archiveDir.getAbsolutePath()).andReturn(
+        "/var/lib/ambari-server/resources/views/work/MY_VIEW{1.0.0}").anyTimes();
+    expect(archiveDir.mkdir()).andReturn(true);
+    expect(archiveDir.toURI()).andReturn(new URI("file:./"));
+
+    expect(viewJarFile.entries()).andReturn(enumeration);
+    expect(viewJarFile.getInputStream(jarEntry)).andReturn(is);
+
+    expect(enumeration.hasMoreElements()).andReturn(true);
+    expect(enumeration.hasMoreElements()).andReturn(false);
+    expect(enumeration.nextElement()).andReturn(jarEntry);
+
+    expect(jarEntry.getName()).andReturn("view.xml");
+
+    expect(is.available()).andReturn(1);
+    expect(is.available()).andReturn(0);
+
+    expect(is.read()).andReturn(10);
+    fos.write(10);
+
+    fos.close();
+    is.close();
+
+    expect(classesDir.exists()).andReturn(true);
+    expect(classesDir.toURI()).andReturn(new URI("file:./"));
+
+    expect(libDir.exists()).andReturn(true);
+
+    expect(libDir.listFiles()).andReturn(new File[]{fileEntry});
+    expect(fileEntry.toURI()).andReturn(new URI("file:./"));
+
+    replay(extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir, libDir, viewJarFile,
+        jarEntry, is, fos, configuration, viewDir, enumeration, fileEntry, viewDAO);
+
+    ViewExtractor viewExtractor = getViewExtractor(viewDefinition);
+    viewExtractor.extractViewArchive(viewDefinition, viewArchive, archiveDir);
+
+    verify(extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir, libDir, viewJarFile,
+        jarEntry, is, fos, configuration, viewDir, enumeration, fileEntry, viewDAO);
+  }
+
+  @Test
+  public void testEnsureExtractedArchiveDirectory() throws Exception {
+
+    ResourceTypeEntity resourceTypeEntity = new ResourceTypeEntity();
+    resourceTypeEntity.setId(10);
+    resourceTypeEntity.setName("MY_VIEW{1.0.0}");
+
+    ViewEntity viewDefinition = ViewEntityTest.getViewEntity();
+    viewDefinition.setResourceType(resourceTypeEntity);
+
+    expect(extractedArchiveDir.exists()).andReturn(true);
+
+    replay(extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir, libDir, viewJarFile,
+        jarEntry, is, fos, configuration, viewDir, enumeration, fileEntry, viewDAO);
+
+    ViewExtractor viewExtractor = getViewExtractor(viewDefinition);
+
+    Assert.assertTrue(viewExtractor.ensureExtractedArchiveDirectory("/var/lib/ambari-server/resources/views/work"));
+
+    verify(extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir, libDir, viewJarFile,
+        jarEntry, is, fos, configuration, viewDir, enumeration, fileEntry, viewDAO);
+
+    reset(extractedArchiveDir);
+
+    expect(extractedArchiveDir.exists()).andReturn(false);
+    expect(extractedArchiveDir.mkdir()).andReturn(true);
+
+    replay(extractedArchiveDir);
+
+    viewExtractor = getViewExtractor(viewDefinition);
+
+    Assert.assertTrue(viewExtractor.ensureExtractedArchiveDirectory("/var/lib/ambari-server/resources/views/work"));
+
+    verify(extractedArchiveDir);
+
+    reset(extractedArchiveDir);
+
+    expect(extractedArchiveDir.exists()).andReturn(false);
+    expect(extractedArchiveDir.mkdir()).andReturn(false);
+
+    replay(extractedArchiveDir);
+
+    viewExtractor = getViewExtractor(viewDefinition);
+
+    Assert.assertFalse(viewExtractor.ensureExtractedArchiveDirectory("/var/lib/ambari-server/resources/views/work"));
+
+    verify(extractedArchiveDir);
+  }
+
+  private ViewExtractor getViewExtractor(ViewEntity viewDefinition) throws Exception {
+
+    Map<File, ViewConfig> viewConfigs =
+        Collections.singletonMap(viewArchive, viewDefinition.getConfiguration());
+
+    Map<String, File> files = new HashMap<String, File>();
+
+    files.put("/var/lib/ambari-server/resources/views/work", extractedArchiveDir);
+    files.put("/var/lib/ambari-server/resources/views/work/MY_VIEW{1.0.0}", archiveDir);
+    files.put("/var/lib/ambari-server/resources/views/work/MY_VIEW{1.0.0}/view.xml", entryFile);
+    files.put("/var/lib/ambari-server/resources/views/work/MY_VIEW{1.0.0}/WEB-INF/classes", classesDir);
+    files.put("/var/lib/ambari-server/resources/views/work/MY_VIEW{1.0.0}/WEB-INF/lib", libDir);
+
+    Map<File, FileOutputStream> outputStreams = new HashMap<File, FileOutputStream>();
+    outputStreams.put(entryFile, fos);
+
+    Map<File, JarFile> jarFiles = new HashMap<File, JarFile>();
+    jarFiles.put(viewArchive, viewJarFile);
+
+    TestViewArchiveUtility archiveUtility = new TestViewArchiveUtility(viewConfigs, files, outputStreams, jarFiles);
+
+
+
+    ViewExtractor viewExtractor = new ViewExtractor();
+    viewExtractor.archiveUtility = archiveUtility;
+
+    return viewExtractor;
+  }
+
+  public static class TestViewArchiveUtility extends ViewArchiveUtility {
+    private final Map<File, ViewConfig> viewConfigs;
+    private final Map<String, File> files;
+    private final Map<File, FileOutputStream> outputStreams;
+    private final Map<File, JarFile> jarFiles;
+
+    public TestViewArchiveUtility(Map<File, ViewConfig> viewConfigs, Map<String, File> files, Map<File,
+        FileOutputStream> outputStreams, Map<File, JarFile> jarFiles) {
+      this.viewConfigs = viewConfigs;
+      this.files = files;
+      this.outputStreams = outputStreams;
+      this.jarFiles = jarFiles;
+    }
+
+    @Override
+    public ViewConfig getViewConfigFromArchive(File archiveFile) throws MalformedURLException, JAXBException {
+      return viewConfigs.get(archiveFile);
+    }
+
+    public ViewConfig getViewConfigFromExtractedArchive(String archivePath)
+        throws JAXBException, FileNotFoundException {
+      for (File viewConfigKey: viewConfigs.keySet()) {
+        if (viewConfigKey.getAbsolutePath().equals(archivePath)) {
+          return viewConfigs.get(viewConfigKey);
+        }
+      }
+      return null;
+    }
+
+    @Override
+    public File getFile(String path) {
+      return files.get(path);
+    }
+
+    @Override
+    public FileOutputStream getFileOutputStream(File file) throws FileNotFoundException {
+      return outputStreams.get(file);
+    }
+
+    @Override
+    public JarFile getJarFile(File file) throws IOException {
+      return jarFiles.get(file);
+    }
+  }
+}

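For readers unfamiliar with the mocking style used throughout these tests: EasyMock consumes consecutive expectations on the same call in the order they were recorded, which is how sequences such as available() returning 1 and then 0 drive the extraction copy loop exactly once. The record/replay/verify lifecycle in miniature (the stream and values are made up for illustration):

    import static org.easymock.EasyMock.createMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.replay;
    import static org.easymock.EasyMock.verify;

    import java.io.IOException;
    import java.io.InputStream;

    public class EasyMockLifecycleSketch {
      public static void main(String[] args) throws IOException {
        InputStream is = createMock(InputStream.class);

        // Record phase: consecutive expectations replay in order.
        expect(is.available()).andReturn(1);
        expect(is.available()).andReturn(0);
        expect(is.read()).andReturn(42);

        replay(is);  // switch the mock from record mode to replay mode

        while (is.available() > 0) {
          System.out.println(is.read());  // prints 42; loop body runs once
        }

        verify(is);  // fails if any recorded expectation went unused
      }
    }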
http://git-wip-us.apache.org/repos/asf/ambari/blob/093ed17d/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
index 38c2f9b..0915325 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
@@ -83,6 +83,7 @@ import org.apache.ambari.view.events.Event;
 import org.apache.ambari.view.events.Listener;
 import org.easymock.EasyMock;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.springframework.security.core.GrantedAuthority;
@@ -164,9 +165,18 @@ public class ViewRegistryTest {
   private static final Configuration configuration = createNiceMock(Configuration.class);
   private static final ViewInstanceHandlerList handlerList = createNiceMock(ViewInstanceHandlerList.class);
 
+
+  @Before
+  public void resetGlobalMocks() {
+    ViewRegistry.initInstance(getRegistry(viewDAO, viewInstanceDAO, userDAO, memberDAO, privilegeDAO,
+        resourceDAO, resourceTypeDAO, securityHelper, handlerList, null, null));
+
+    reset(viewDAO, resourceDAO, viewInstanceDAO, userDAO, memberDAO,
+        privilegeDAO, resourceTypeDAO, securityHelper, configuration, handlerList);
+  }
+
   @Test
   public void testReadViewArchives() throws Exception {
-    ViewRegistry registry = getRegistry();
 
     File viewDir = createNiceMock(File.class);
     File extractedArchiveDir = createNiceMock(File.class);
@@ -275,7 +285,10 @@ public class ViewRegistryTest {
     replay(configuration, viewDir, extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir,
         libDir, fileEntry, viewJarFile, enumeration, jarEntry, is, fos, resourceDAO, viewDAO, viewInstanceDAO);
 
-    registry.setHelper(new TestViewRegistryHelper(viewConfigs, files, outputStreams, jarFiles));
+    TestViewArchiveUtility archiveUtility = new TestViewArchiveUtility(viewConfigs, files, outputStreams, jarFiles);
+
+    ViewRegistry registry = getRegistry(viewDAO, viewInstanceDAO, userDAO, memberDAO, privilegeDAO,
+        resourceDAO, resourceTypeDAO, securityHelper, handlerList, null, archiveUtility);
 
     registry.readViewArchives();
 
@@ -298,10 +311,8 @@ public class ViewRegistryTest {
         libDir, fileEntry, viewJarFile, enumeration, jarEntry, is, fos, resourceDAO, viewDAO, viewInstanceDAO);
   }
 
-  @Ignore
   @Test
   public void testReadViewArchives_exception() throws Exception {
-    ViewRegistry registry = getRegistry();
 
     File viewDir = createNiceMock(File.class);
     File extractedArchiveDir = createNiceMock(File.class);
@@ -357,6 +368,10 @@ public class ViewRegistryTest {
     expect(configuration.getViewsDir()).andReturn(viewDir);
     expect(viewDir.getAbsolutePath()).andReturn("/var/lib/ambari-server/resources/views");
 
+    expect(configuration.getViewExtractionThreadPoolCoreSize()).andReturn(2).anyTimes();
+    expect(configuration.getViewExtractionThreadPoolMaxSize()).andReturn(3).anyTimes();
+    expect(configuration.getViewExtractionThreadPoolTimeout()).andReturn(10000L).anyTimes();
+
     expect(viewDir.listFiles()).andReturn(new File[]{viewArchive});
 
     expect(viewArchive.isDirectory()).andReturn(false);
@@ -405,7 +420,10 @@ public class ViewRegistryTest {
     replay(configuration, viewDir, extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir,
         libDir, fileEntry, viewJarFile, enumeration, jarEntry, is, fos, viewDAO);
 
-    registry.setHelper(new TestViewRegistryHelper(viewConfigs, files, outputStreams, jarFiles));
+    TestViewArchiveUtility archiveUtility = new TestViewArchiveUtility(viewConfigs, files, outputStreams, jarFiles);
+
+    ViewRegistry registry = getRegistry(viewDAO, viewInstanceDAO, userDAO, memberDAO, privilegeDAO,
+        resourceDAO, resourceTypeDAO, securityHelper, handlerList, null, archiveUtility);
 
     registry.readViewArchives();
 
@@ -428,7 +446,7 @@ public class ViewRegistryTest {
 
   @Test
   public void testListener() throws Exception {
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     TestListener listener = new TestListener();
     registry.registerListener(listener, "MY_VIEW", "1.0.0");
@@ -460,7 +478,7 @@ public class ViewRegistryTest {
 
   @Test
   public void testListener_allVersions() throws Exception {
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     TestListener listener = new TestListener();
     registry.registerListener(listener, "MY_VIEW", null); // all versions of MY_VIEW
@@ -502,7 +520,7 @@ public class ViewRegistryTest {
   public void testAddGetDefinitions() throws Exception {
     ViewEntity viewDefinition = ViewEntityTest.getViewEntity();
 
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     registry.addDefinition(viewDefinition);
 
@@ -520,7 +538,7 @@ public class ViewRegistryTest {
     ViewEntity viewDefinition = ViewEntityTest.getViewEntity();
     ViewInstanceEntity viewInstanceDefinition = ViewInstanceEntityTest.getViewInstanceEntity();
 
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     registry.addDefinition(viewDefinition);
 
@@ -538,7 +556,7 @@ public class ViewRegistryTest {
   @Test
   public void testGetSubResourceDefinitions() throws Exception {
     ViewEntity viewDefinition = ViewEntityTest.getViewEntity();
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     ResourceConfig config = ResourceConfigTest.getResourceConfigs().get(0);
     Resource.Type type1 = new Resource.Type("myType");
@@ -556,7 +574,7 @@ public class ViewRegistryTest {
 
   @Test
   public void testAddInstanceDefinition() throws Exception {
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     ViewEntity viewEntity = ViewEntityTest.getViewEntity();
     InstanceConfig instanceConfig = InstanceConfigTest.getInstanceConfigs().get(0);
@@ -587,7 +605,7 @@ public class ViewRegistryTest {
   @Test
   public void testInstallViewInstance() throws Exception {
 
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     Properties properties = new Properties();
     properties.put("p1", "v1");
@@ -620,7 +638,7 @@ public class ViewRegistryTest {
   @Test
   public void testInstallViewInstance_invalid() throws Exception {
 
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     Properties properties = new Properties();
     properties.put("p1", "v1");
@@ -646,7 +664,7 @@ public class ViewRegistryTest {
   @Test
   public void testInstallViewInstance_unknownView() throws Exception {
 
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     Properties properties = new Properties();
     properties.put("p1", "v1");
@@ -673,7 +691,7 @@ public class ViewRegistryTest {
   @Test
   public void testUpdateViewInstance() throws Exception {
 
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     Properties properties = new Properties();
     properties.put("p1", "v1");
@@ -707,7 +725,7 @@ public class ViewRegistryTest {
   @Test
   public void testUninstallViewInstance() throws Exception {
 
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     Configuration ambariConfig = new Configuration(new Properties());
 
@@ -744,7 +762,7 @@ public class ViewRegistryTest {
   @Test
   public void testUpdateViewInstance_invalid() throws Exception {
 
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     Properties properties = new Properties();
     properties.put("p1", "v1");
@@ -777,7 +795,7 @@ public class ViewRegistryTest {
   @Test
   public void testRemoveInstanceData() throws Exception {
 
-    ViewRegistry registry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
 
     ViewInstanceEntity viewInstanceEntity = ViewInstanceEntityTest.getViewInstanceEntity();
 
@@ -797,7 +815,7 @@ public class ViewRegistryTest {
 
   @Test
   public void testIncludeDefinitionForAdmin() {
-    ViewRegistry viewRegistry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
     ViewEntity viewEntity = createNiceMock(ViewEntity.class);
     AmbariGrantedAuthority adminAuthority = createNiceMock(AmbariGrantedAuthority.class);
     PrivilegeEntity privilegeEntity = createNiceMock(PrivilegeEntity.class);
@@ -815,14 +833,14 @@ public class ViewRegistryTest {
     expect(configuration.getApiAuthentication()).andReturn(true);
     replay(securityHelper, adminAuthority, privilegeEntity, permissionEntity, configuration);
 
-    Assert.assertTrue(viewRegistry.includeDefinition(viewEntity));
+    Assert.assertTrue(registry.includeDefinition(viewEntity));
 
     verify(securityHelper, adminAuthority, privilegeEntity, permissionEntity, configuration);
   }
 
   @Test
   public void testIncludeDefinitionForUserNoInstances() {
-    ViewRegistry viewRegistry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
     ViewEntity viewEntity = createNiceMock(ViewEntity.class);
 
     Collection<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>();
@@ -836,14 +854,14 @@ public class ViewRegistryTest {
     expect(configuration.getApiAuthentication()).andReturn(true);
     replay(securityHelper, viewEntity, configuration);
 
-    Assert.assertFalse(viewRegistry.includeDefinition(viewEntity));
+    Assert.assertFalse(registry.includeDefinition(viewEntity));
 
     verify(securityHelper, viewEntity, configuration);
   }
 
   @Test
   public void testIncludeDefinitionForUserHasAccess() {
-    ViewRegistry viewRegistry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
     ViewEntity viewEntity = createNiceMock(ViewEntity.class);
     ViewInstanceEntity instanceEntity = createNiceMock(ViewInstanceEntity.class);
     ResourceEntity resourceEntity = createNiceMock(ResourceEntity.class);
@@ -868,31 +886,31 @@ public class ViewRegistryTest {
     expect(configuration.getApiAuthentication()).andReturn(true);
     replay(securityHelper, viewEntity, instanceEntity, viewUseAuthority, privilegeEntity, permissionEntity, configuration);
 
-    Assert.assertTrue(viewRegistry.includeDefinition(viewEntity));
+    Assert.assertTrue(registry.includeDefinition(viewEntity));
 
     verify(securityHelper, viewEntity, instanceEntity, viewUseAuthority, privilegeEntity, permissionEntity, configuration);
   }
 
   @Test
   public void testIncludeDefinitionForNoApiAuthentication() {
-    ViewRegistry viewRegistry = getRegistry();
+    ViewRegistry registry = ViewRegistry.getInstance();
     ViewEntity viewEntity = createNiceMock(ViewEntity.class);
 
     expect(configuration.getApiAuthentication()).andReturn(false);
     replay(securityHelper, viewEntity, configuration);
 
-    Assert.assertTrue(viewRegistry.includeDefinition(viewEntity));
+    Assert.assertTrue(registry.includeDefinition(viewEntity));
 
     verify(securityHelper, viewEntity, configuration);
   }
 
-  public class TestViewRegistryHelper extends ViewRegistry.ViewRegistryHelper {
+  public static class TestViewArchiveUtility extends ViewArchiveUtility {
     private final Map<File, ViewConfig> viewConfigs;
     private final Map<String, File> files;
     private final Map<File, FileOutputStream> outputStreams;
     private final Map<File, JarFile> jarFiles;
 
-    public TestViewRegistryHelper(Map<File, ViewConfig> viewConfigs, Map<String, File> files, Map<File,
+    public TestViewArchiveUtility(Map<File, ViewConfig> viewConfigs, Map<String, File> files, Map<File,
         FileOutputStream> outputStreams, Map<File, JarFile> jarFiles) {
       this.viewConfigs = viewConfigs;
       this.files = files;
@@ -948,22 +966,12 @@ public class ViewRegistryTest {
     }
   }
 
-  private static ViewRegistry getRegistry() {
-    ViewRegistry instance = getRegistry(viewDAO, viewInstanceDAO,
-        userDAO, memberDAO, privilegeDAO,
-        resourceDAO, resourceTypeDAO, securityHelper, handlerList);
-
-    reset(viewDAO, resourceDAO, viewInstanceDAO, userDAO, memberDAO,
-        privilegeDAO, resourceTypeDAO, securityHelper, configuration, handlerList);
-
-    return instance;
-  }
-
   public static ViewRegistry getRegistry(ViewDAO viewDAO, ViewInstanceDAO viewInstanceDAO,
-                                  UserDAO userDAO, MemberDAO memberDAO,
-                                  PrivilegeDAO privilegeDAO, ResourceDAO resourceDAO,
-                                  ResourceTypeDAO resourceTypeDAO, SecurityHelper securityHelper,
-                                  ViewInstanceHandlerList handlerList) {
+                                         UserDAO userDAO, MemberDAO memberDAO,
+                                         PrivilegeDAO privilegeDAO, ResourceDAO resourceDAO,
+                                         ResourceTypeDAO resourceTypeDAO, SecurityHelper securityHelper,
+                                         ViewInstanceHandlerList handlerList,
+                                         ViewExtractor viewExtractor, ViewArchiveUtility archiveUtility) {
 
     ViewRegistry instance = new ViewRegistry();
 
@@ -977,13 +985,18 @@ public class ViewRegistryTest {
     instance.securityHelper = securityHelper;
     instance.configuration = configuration;
     instance.handlerList = handlerList;
+    instance.extractor = viewExtractor == null ? new ViewExtractor() : viewExtractor;
+    instance.archiveUtility = archiveUtility == null ? new ViewArchiveUtility() : archiveUtility;
+    instance.extractor.archiveUtility = instance.archiveUtility;
 
     return instance;
   }
 
   public static ViewEntity getViewEntity(ViewConfig viewConfig, Configuration ambariConfig,
                                      ClassLoader cl, String archivePath) throws Exception{
-    ViewRegistry registry = getRegistry();
+
+    ViewRegistry registry = getRegistry(viewDAO, viewInstanceDAO, userDAO, memberDAO, privilegeDAO,
+        resourceDAO, resourceTypeDAO, securityHelper, handlerList, null, null);
 
     ViewEntity viewDefinition = new ViewEntity(viewConfig, ambariConfig, archivePath);
 
@@ -993,7 +1006,9 @@ public class ViewRegistryTest {
   }
 
   public static ViewInstanceEntity getViewInstanceEntity(ViewEntity viewDefinition, InstanceConfig instanceConfig) throws Exception {
-    ViewRegistry registry = getRegistry();
+
+    ViewRegistry registry = getRegistry(viewDAO, viewInstanceDAO, userDAO, memberDAO, privilegeDAO,
+        resourceDAO, resourceTypeDAO, securityHelper, handlerList, null, null);
 
     ViewInstanceEntity viewInstanceDefinition =
         new ViewInstanceEntity(viewDefinition, instanceConfig);

http://git-wip-us.apache.org/repos/asf/ambari/blob/093ed17d/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index ebd691d..b11d88e 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -2369,7 +2369,8 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(ambari_server, "is_root")
   @patch.object(ambari_server, 'is_server_runing')
   @patch.object(ambari_server, 'proceedJDBCProperties')
-  def test_setup(self, proceedJDBCProperties_mock, is_server_runing_mock, is_root_mock, store_local_properties_mock,
+  @patch.object(ambari_server, "extract_views")
+  def test_setup(self, extract_views_mock, proceedJDBCProperties_mock, is_server_runing_mock, is_root_mock, store_local_properties_mock,
                  is_local_database_mock, store_remote_properties_mock,
                  setup_remote_db_mock, check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
                  check_postgre_up_mock, setup_db_mock, configure_postgres_mock,
@@ -2387,6 +2388,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     get_os_type_mock.return_value = ""
     get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
     run_os_command_mock.return_value = 3,"",""
+    extract_views_mock.return_value = 0
 
     def reset_mocks():
       is_jdbc_user_changed_mock.reset_mock()
@@ -4966,9 +4968,10 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(ambari_server, "setup_db")
   @patch.object(ambari_server, "get_is_secure")
   @patch.object(ambari_server, "store_password_file")
+  @patch.object(ambari_server, "extract_views")
   @patch("sys.exit")
   @patch('__builtin__.raw_input')
-  def test_ambariServerSetupWithCustomDbName(self, raw_input, exit_mock, store_password_file_mock,
+  def test_ambariServerSetupWithCustomDbName(self, raw_input, exit_mock, extract_views_mock, store_password_file_mock,
                                              get_is_secure_mock, setup_db_mock, is_root_mock, is_local_database_mock,
                                              check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
                                              check_postgre_up_mock, configure_postgres_mock,
@@ -4995,6 +4998,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     setup_db_mock.return_value = (0, None, None)
     get_is_secure_mock.return_value = False
     store_password_file_mock.return_value = "password"
+    extract_views_mock.return_value = 0
     get_os_type_mock.return_value = ""
     get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
     run_os_command_mock.return_value = 3,"",""
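
For reference, stacked patch decorators in Python's mock library are applied bottom-up, which is why the newly added extract_views_mock appears first in the parameter list even though its @patch.object decorator sits last in the stack. A minimal, self-contained sketch of the same pattern (FakeAmbariServer and its methods are stand-ins invented for this example; the real tests patch the ambari_server module using the Python 2 era 'mock' package):

    import unittest
    from unittest.mock import patch  # the tests above use 'from mock.mock import patch'

    class FakeAmbariServer:
        """Stand-in for the ambari_server module, only for this sketch."""
        @staticmethod
        def is_root():
            raise RuntimeError("patched out in tests")

        @staticmethod
        def extract_views():
            raise RuntimeError("patched out in tests")

        @classmethod
        def setup(cls):
            if not cls.is_root():
                return 4              # error code, illustrative only
            return cls.extract_views()

    class TestSetup(unittest.TestCase):
        # Decorators apply bottom-up: the patch closest to the function
        # supplies the first mock argument after self.
        @patch.object(FakeAmbariServer, "is_root")
        @patch.object(FakeAmbariServer, "extract_views")
        def test_setup_extracts_views(self, extract_views_mock, is_root_mock):
            is_root_mock.return_value = True
            extract_views_mock.return_value = 0  # 0 == success, as in the patch above
            self.assertEqual(FakeAmbariServer.setup(), 0)
            extract_views_mock.assert_called_once_with()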


[16/30] git commit: AMBARI-7277 Slider View: Creating app without metrics should not show Metrics section. (atkach)

Posted by jo...@apache.org.
AMBARI-7277 Slider View: Creating app without metrics should not show Metrics section. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e10ec8b0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e10ec8b0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e10ec8b0

Branch: refs/heads/branch-alerts-dev
Commit: e10ec8b0943eda245c6ae6959b31430c45515f4a
Parents: 18f427d
Author: atkach <at...@hortonworks.com>
Authored: Fri Sep 12 14:48:05 2014 +0300
Committer: atkach <at...@hortonworks.com>
Committed: Fri Sep 12 14:48:05 2014 +0300

----------------------------------------------------------------------
 .../createAppWizard/step1_controller.js         | 26 +++++++++++---------
 .../createAppWizard/step3_controller.js         |  4 +++
 .../main/resources/ui/app/models/slider_app.js  |  7 +++++-
 .../app/templates/components/configSection.hbs  |  3 ++-
 4 files changed, 27 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e10ec8b0/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step1_controller.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step1_controller.js b/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step1_controller.js
index 9faaa32..9ffc432 100644
--- a/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step1_controller.js
+++ b/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step1_controller.js
@@ -139,18 +139,22 @@ App.CreateAppWizardStep1Controller = Ember.Controller.extend({
    */
   loadGangliaClustersSuccessCallback: function (data) {
     var gangliaCustomClusters = [];
+
     if (data.items[0]) {
-      //parse CSV string with cluster names and ports
-      Em.get(data.items[0].configurations[0].properties, 'ganglia_custom_clusters').replace(/\'/g, "").split(',').forEach(function(item, index){
-        if (index % 2 === 0) {
-          gangliaCustomClusters.push({
-            name: item
-          })
-        } else {
-          gangliaCustomClusters[gangliaCustomClusters.length - 1].port = parseInt(item);
-        }
-      });
-      App.set('gangliaClusters', gangliaCustomClusters);
+      var prop = Em.get(data.items[0].configurations[0].properties, 'ganglia_custom_clusters');
+      if (prop) {
+        //parse CSV string with cluster names and ports
+        prop.replace(/\'/g, "").split(',').forEach(function(item, index){
+          if (index % 2 === 0) {
+            gangliaCustomClusters.push({
+              name: item
+            })
+          } else {
+            gangliaCustomClusters[gangliaCustomClusters.length - 1].port = parseInt(item);
+          }
+        });
+        App.set('gangliaClusters', gangliaCustomClusters);
+      }
     }
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e10ec8b0/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js b/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js
index a2eaa2e..2dd5b72 100644
--- a/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js
+++ b/contrib/views/slider/src/main/resources/ui/app/controllers/createAppWizard/step3_controller.js
@@ -76,6 +76,7 @@ App.CreateAppWizardStep3Controller = Ember.ObjectController.extend({
    */
   configsSet: [
     {
+      name: 'ganglia_metrics',
       trigger: {value: false, label: Em.I18n.t('configs.enable.metrics'), viewType: 'checkbox'},
       isSet: true,
       section: 'global',
@@ -152,6 +153,9 @@ App.CreateAppWizardStep3Controller = Ember.ObjectController.extend({
     configsSet.forEach(function (configSet) {
       if (configSet.configs.length === configSet.configNames.length) {
         delete configSet.configNames;
+        if (configSet.name === 'ganglia_metrics') {
+          configSet.trigger.readOnly = (!App.get('gangliaClusters') || App.get('gangliaClusters').length === 0);
+        }
         configSet.trigger = App.ConfigProperty.create(configSet.trigger);
         this.initConfigSetDependecies(configSet);
         configs.unshift(configSet);

http://git-wip-us.apache.org/repos/asf/ambari/blob/e10ec8b0/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js b/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js
index bf02f30..3710877 100644
--- a/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js
+++ b/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js
@@ -112,8 +112,13 @@ App.SliderApp = DS.Model.extend({
    * @type {boolean}
    */
   showMetrics: function() {
+    var global = this.get('configs')['global'];
+    //check whether slider has GANGLIA configured if not metrics should be hidden
+    if (!(global['ganglia_server_host'] && global['ganglia_server_id'] && global['ganglia_server_port'])) {
+      return false;
+    }
     return App.SliderApp.Status.running === this.get('status');
-  }.property('status'),
+  }.property('status', 'configs'),
 
   /**
    * Map object to array

http://git-wip-us.apache.org/repos/asf/ambari/blob/e10ec8b0/contrib/views/slider/src/main/resources/ui/app/templates/components/configSection.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/templates/components/configSection.hbs b/contrib/views/slider/src/main/resources/ui/app/templates/components/configSection.hbs
index dca8f9d..326218a 100644
--- a/contrib/views/slider/src/main/resources/ui/app/templates/components/configSection.hbs
+++ b/contrib/views/slider/src/main/resources/ui/app/templates/components/configSection.hbs
@@ -26,7 +26,8 @@
 
                   <div class="col-sm-6">
                     {{view view.configSet.trigger.view
-                    checked=view.configSet.trigger.value
+                      checked=view.configSet.trigger.value
+                      disabled=view.configSet.trigger.readOnly
                     }}
                   </div>
               </div>
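
The step1_controller change above adds a null guard before parsing ganglia_custom_clusters, a quoted CSV of alternating cluster names and ports. A rough Python transcription of the parsing loop, for readers skimming the JavaScript (the property format comes from the diff; the function name and sample values are illustrative):

    def parse_ganglia_clusters(prop):
        """Parse "'name',port,'name',port,..." into [{'name': ..., 'port': ...}].

        Returns [] for a missing or empty property, mirroring the guard added
        in the patch so an absent Ganglia config no longer breaks the wizard.
        """
        clusters = []
        if not prop:
            return clusters
        for index, item in enumerate(prop.replace("'", "").split(",")):
            if index % 2 == 0:            # even positions: cluster names
                clusters.append({"name": item})
            else:                         # odd positions: ports
                clusters[-1]["port"] = int(item)
        return clusters

    # parse_ganglia_clusters("'HDPSlaves',8660,'HDPNameNode',8661")
    #   -> [{'name': 'HDPSlaves', 'port': 8660}, {'name': 'HDPNameNode', 'port': 8661}]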


[15/30] git commit: AMBARI-7278. Could not load configs for Hive and Oozie in the UI after upgrade from 1.4.4 to 1.7.0 (vbrodetskyi)

Posted by jo...@apache.org.
AMBARI-7278. Could not load configs for Hive and Oozie in the UI after upgrade from 1.4.4 to 1.7.0 (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6d6f4dda
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6d6f4dda
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6d6f4dda

Branch: refs/heads/branch-alerts-dev
Commit: 6d6f4dda59c6d8ab76f459b8b16a835f07053195
Parents: e10ec8b
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Fri Sep 12 14:37:31 2014 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Fri Sep 12 14:37:31 2014 +0300

----------------------------------------------------------------------
 .../java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java   | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6d6f4dda/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 0e8aea5..4a9b83f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -1004,6 +1004,8 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     result.put("zookeeper_keytab_path","zookeeper-env");
     result.put("storm_principal_name","storm-env");
     result.put("storm_keytab","storm-env");
+    result.put("hive_hostname","hive-env");
+    result.put("oozie_hostname","oozie-env");
 
     return result;
   }
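
The two added entries extend the key-to-config-type map that UpgradeCatalog170 uses to relocate former "global" properties into the new per-service *-env config types; hive_hostname and oozie_hostname were previously left behind, so the Hive and Oozie config pages had nothing to load after the upgrade. A toy sketch of how such a lookup drives the migration, written in Python for brevity (only the two key/type pairs come from the diff; the surrounding helper is illustrative):

    # The real table lives in Java in UpgradeCatalog170; this is a sketch.
    GLOBAL_TO_ENV = {
        "hive_hostname": "hive-env",    # added by this patch
        "oozie_hostname": "oozie-env",  # added by this patch
        # ... many other former 'global' properties ...
    }

    def route_global_property(key, value, new_configs):
        """File one former 'global' property under its new *-env config type."""
        config_type = GLOBAL_TO_ENV.get(key)
        if config_type is not None:
            new_configs.setdefault(config_type, {})[key] = value

    # route_global_property("hive_hostname", "c6402.ambari.apache.org", configs)
    # lands the value under "hive-env" instead of the retired "global" type.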


[30/30] git commit: Merge branch 'trunk' into branch-alerts-dev

Posted by jo...@apache.org.
Merge branch 'trunk' into branch-alerts-dev

Conflicts:
	ambari-server/conf/unix/ambari.properties
	ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aaf05135
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aaf05135
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aaf05135

Branch: refs/heads/branch-alerts-dev
Commit: aaf051357de9493217fd5b2e05daa06c55a13132
Parents: 853497f 3355733
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Sep 12 19:52:41 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Sep 12 19:52:41 2014 -0400

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/index.html  |   7 +-
 .../resources/ui/admin-web/app/scripts/app.js   |   3 +-
 .../controllers/ambariViews/ViewsEditCtrl.js    |   7 +-
 .../controllers/groups/GroupsEditCtrl.js        |   8 +-
 .../app/scripts/controllers/mainCtrl.js         |   2 +-
 .../scripts/controllers/users/UsersShowCtrl.js  |  18 +-
 .../ui/admin-web/app/scripts/i18n.config.js     |  29 ++
 .../resources/ui/admin-web/app/styles/main.css  |   9 +
 .../admin-web/app/views/ambariViews/edit.html   |  10 +-
 .../app/views/clusters/manageAccess.html        |   2 +-
 .../ui/admin-web/app/views/groups/edit.html     |   4 +-
 .../ui/admin-web/app/views/leftNavbar.html      |  15 +-
 .../ui/admin-web/app/views/users/show.html      |   8 +-
 .../src/main/resources/ui/admin-web/bower.json  |   4 +-
 .../libraries/providers/execute_hadoop.py       |   9 +-
 .../libraries/providers/hdfs_directory.py       |  16 +-
 .../libraries/resources/execute_hadoop.py       |   1 +
 .../libraries/resources/hdfs_directory.py       |   1 +
 ambari-server/conf/unix/ambari.properties       |   2 +-
 ambari-server/docs/api/v1/clusters-cluster.md   | 284 +++++++-----
 .../server/configuration/Configuration.java     |   5 +-
 .../internal/BaseBlueprintProcessor.java        |  13 +-
 .../ServiceConfigVersionResourceProvider.java   |   2 +-
 .../internal/StackAdvisorResourceProvider.java  |   6 +-
 .../server/upgrade/UpgradeCatalog170.java       |   2 +
 .../ambari/server/view/ViewArchiveUtility.java  | 120 ++++++
 .../ambari/server/view/ViewExtractor.java       | 223 ++++++++++
 .../apache/ambari/server/view/ViewRegistry.java | 429 ++++++-------------
 ambari-server/src/main/python/ambari-server.py  | 100 ++++-
 .../src/main/resources/ganglia_properties.json  |  40 ++
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  20 +-
 .../hooks/before-INSTALL/scripts/params.py      |   3 +-
 .../hooks/before-START/files/checkForFormat.sh  |   3 +
 .../2.0.6/hooks/before-START/scripts/params.py  |  25 +-
 .../services/FLUME/package/scripts/flume.py     |   2 +-
 .../FLUME/package/scripts/flume_check.py        |   2 +-
 .../services/FLUME/package/scripts/params.py    |  14 +-
 .../HBASE/package/files/hbaseSmokeVerify.sh     |   3 +-
 .../services/HBASE/package/scripts/params.py    |  37 +-
 .../HBASE/package/scripts/service_check.py      |   6 +-
 .../HDFS/package/files/checkForFormat.sh        |   4 +-
 .../HDFS/package/scripts/hdfs_namenode.py       |  15 +-
 .../services/HDFS/package/scripts/namenode.py   |   2 +-
 .../services/HDFS/package/scripts/params.py     |  34 +-
 .../HDFS/package/scripts/service_check.py       |  27 +-
 .../2.0.6/services/HIVE/package/scripts/hcat.py |   6 +
 .../HIVE/package/scripts/hcat_service_check.py  |   8 +-
 .../2.0.6/services/HIVE/package/scripts/hive.py |   2 +
 .../HIVE/package/scripts/hive_service.py        |   9 +-
 .../HIVE/package/scripts/install_jars.py        |   6 +-
 .../services/HIVE/package/scripts/params.py     |  73 ++--
 .../package/templates/startHiveserver2.sh.j2    |   2 +-
 .../services/OOZIE/configuration/oozie-env.xml  |   2 +-
 .../services/OOZIE/package/files/oozieSmoke2.sh |   8 +-
 .../OOZIE/package/scripts/oozie_service.py      |   4 +-
 .../services/OOZIE/package/scripts/params.py    |  24 +-
 .../services/PIG/package/scripts/params.py      |  20 +-
 .../PIG/package/scripts/service_check.py        |  10 +-
 .../services/SQOOP/package/scripts/params.py    |  10 +-
 .../WEBHCAT/configuration/webhcat-env.xml       |   2 +-
 .../services/WEBHCAT/package/scripts/params.py  |  41 +-
 .../services/WEBHCAT/package/scripts/webhcat.py |  11 +-
 .../services/YARN/package/scripts/params.py     |  45 +-
 .../YARN/package/scripts/resourcemanager.py     |   5 +-
 .../services/YARN/package/scripts/service.py    |   2 +-
 .../YARN/package/scripts/service_check.py       |   3 +-
 .../2.0.6/services/YARN/package/scripts/yarn.py |  14 +-
 .../ZOOKEEPER/package/scripts/params.py         |  17 +-
 .../services/FALCON/package/scripts/params.py   |  15 +-
 .../services/STORM/package/scripts/params.py    |   5 +-
 .../main/resources/stacks/HDP/2.2/metainfo.xml  |  23 +
 .../resources/stacks/HDP/2.2/repos/repoinfo.xml |  82 ++++
 .../stacks/HDP/2.2/role_command_order.json      |  88 ++++
 .../stacks/HDP/2.2/services/FALCON/metainfo.xml |  28 ++
 .../stacks/HDP/2.2/services/FLUME/metainfo.xml  |  40 ++
 .../stacks/HDP/2.2/services/HBASE/metainfo.xml  |  42 ++
 .../services/HDFS/configuration/hadoop-env.xml  |  29 ++
 .../services/HDFS/configuration/hdfs-site.xml   |  34 ++
 .../stacks/HDP/2.2/services/HDFS/metainfo.xml   |  68 +++
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   |  44 ++
 .../services/OOZIE/configuration/oozie-site.xml |  45 ++
 .../stacks/HDP/2.2/services/OOZIE/metainfo.xml  |  28 ++
 .../stacks/HDP/2.2/services/PIG/metainfo.xml    |  41 ++
 .../stacks/HDP/2.2/services/SQOOP/metainfo.xml  |  29 ++
 .../services/STORM/configuration/storm-env.xml  |  29 ++
 .../services/STORM/configuration/storm-site.xml |  54 +++
 .../stacks/HDP/2.2/services/STORM/metainfo.xml  |  29 ++
 .../stacks/HDP/2.2/services/TEZ/metainfo.xml    |  40 ++
 .../WEBHCAT/configuration/webhcat-site.xml      |  59 +++
 .../HDP/2.2/services/WEBHCAT/metainfo.xml       |  44 ++
 .../YARN/configuration-mapred/mapred-site.xml   |  36 ++
 .../services/YARN/configuration/yarn-site.xml   |  35 ++
 .../stacks/HDP/2.2/services/YARN/metainfo.xml   |  71 +++
 .../HDP/2.2/services/ZOOKEEPER/metainfo.xml     |  40 ++
 .../AmbariPrivilegeResourceProviderTest.java    |   2 +-
 .../internal/BaseBlueprintProcessorTest.java    | 413 ++++++++++++++++++
 .../StackAdvisorResourceProviderTest.java       |  76 ++++
 .../ViewPrivilegeResourceProviderTest.java      |   2 +-
 ...mbariLdapAuthenticationProviderBaseTest.java |  50 +++
 ...uthenticationProviderForDNWithSpaceTest.java |   4 +-
 .../AmbariLdapAuthenticationProviderTest.java   |   4 +-
 .../ambari/server/view/ViewExtractorTest.java   | 262 +++++++++++
 .../ambari/server/view/ViewRegistryTest.java    | 105 +++--
 .../src/test/python/TestAmbariServer.py         |  60 ++-
 .../stacks/1.3.2/HDFS/test_service_check.py     |  18 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   6 +
 .../2.0.6/HBASE/test_hbase_regionserver.py      |   6 +
 .../2.0.6/HBASE/test_hbase_service_check.py     |  10 +-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |  36 +-
 .../stacks/2.0.6/HDFS/test_service_check.py     |  18 +-
 .../stacks/2.0.6/HIVE/test_hcat_client.py       |  10 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |   8 +
 .../stacks/2.0.6/HIVE/test_hive_server.py       |  18 +
 .../2.0.6/HIVE/test_hive_service_check.py       |   7 +
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  10 +-
 .../stacks/2.0.6/PIG/test_pig_service_check.py  |  12 +-
 .../stacks/2.0.6/WEBHCAT/test_webhcat_server.py |  12 +
 .../stacks/2.0.6/YARN/test_historyserver.py     |  12 +
 .../stacks/2.0.6/YARN/test_nodemanager.py       |  12 +
 .../2.0.6/YARN/test_yarn_service_check.py       |   9 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |   2 +
 .../stacks/2.1/HIVE/test_hive_metastore.py      |   7 +
 ambari-web/app/assets/test/tests.js             |   1 +
 .../highAvailability/progress_controller.js     |  15 +-
 .../main/dashboard/config_history_controller.js |   2 +-
 .../app/controllers/wizard/step7_controller.js  |   8 +-
 ambari-web/app/data/HDP2/secure_properties.js   |   6 +-
 ambari-web/app/data/secure_properties.js        |  15 +
 ambari-web/app/models/service_config_version.js |   5 +-
 ambari-web/app/styles/application.less          |  20 +-
 .../common/configs/config_history_flow.hbs      |   8 +-
 .../templates/main/dashboard/config_history.hbs |  14 +-
 ambari-web/app/utils/config.js                  |  14 +-
 .../views/main/dashboard/config_history_view.js |   8 +-
 .../progress_controller_test.js                 | 144 +++++++
 .../assets/data/resource/slider-properties.json |  64 +++
 .../ui/app/components/configSection.js          |   2 +-
 .../createAppWizard/step1_controller.js         |  26 +-
 .../createAppWizard/step3_controller.js         |   8 +-
 .../app/controllers/slider_apps_controller.js   | 187 --------
 .../ui/app/controllers/slider_controller.js     | 294 +++++++++++++
 .../src/main/resources/ui/app/helpers/ajax.js   |  44 +-
 .../src/main/resources/ui/app/initialize.js     |  16 +-
 .../ui/app/mappers/application_status.js        |  52 ++-
 .../main/resources/ui/app/models/slider_app.js  |   7 +-
 .../resources/ui/app/styles/application.less    |  49 ++-
 .../app/templates/components/configSection.hbs  |   3 +-
 .../ui/app/templates/createAppWizard/step3.hbs  |   2 +-
 .../ui/app/templates/slider_app/summary.hbs     | 200 ++++-----
 .../ui/app/views/createAppWizard/step3_view.js  |   3 +-
 .../ui/app/views/slider_app/summary_view.js     |   4 +-
 151 files changed, 4293 insertions(+), 1097 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aaf05135/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------


[02/30] AMBARI-7257 Use versioned RPMs for the HDP 2.2 stack and make it pluggable so the scripts can be reused for HDP 2.* (dsen)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
index 6da9d2f..2a5481c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
@@ -28,7 +28,10 @@ class TestHcatClient(RMFTestCase):
                        command = "configure",
                        config_file="default.json"
     )
-
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+                              owner = 'hcat',
+                              group = 'hadoop',
+    )
     self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
       owner = 'hcat',
       group = 'hadoop',
@@ -59,7 +62,10 @@ class TestHcatClient(RMFTestCase):
                          command = "configure",
                          config_file="secured.json"
     )
-
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+                              owner = 'hcat',
+                              group = 'hadoop',
+    )
     self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
       owner = 'hcat',
       group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
index 0bef64d..204e384 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
@@ -17,6 +17,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 
@@ -40,6 +41,8 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+                       'HADOOP_HOME' : '/usr'},
         user = 'hive',
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -78,8 +81,11 @@ class TestHiveMetastore(RMFTestCase):
     )
 
     self.assert_configure_secured()
+    self.maxDiff = None
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+                       'HADOOP_HOME' : '/usr'},
         user = 'hive',
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -196,6 +202,7 @@ class TestHiveMetastore(RMFTestCase):
     )
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
         path = ['/bin', '/usr/bin/'],
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
@@ -322,6 +329,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
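
Several of the updated assertions expect hive commands to run with the hive bin directory appended to PATH; the expected value is built with plain string arithmetic over os.environ and os.pathsep. A standalone sketch of that construction (the directory and the HADOOP_HOME value are copied from the assertions; the helper name is illustrative):

    import os

    HIVE_BIN_DIR = "/usr/lib/hive/bin"  # value asserted in the tests above

    def hive_environment(hadoop_home=None):
        """Build the environment dict the Execute resources are expected to get."""
        env = {"PATH": os.environ["PATH"] + os.pathsep + HIVE_BIN_DIR}
        if hadoop_home is not None:
            env["HADOOP_HOME"] = hadoop_home  # '/usr' in the metastore tests
        return env

    # hive_environment("/usr") ->
    #   {'PATH': '<inherited PATH>:/usr/lib/hive/bin', 'HADOOP_HOME': '/usr'}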

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 1c7e47e..bc723ab 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -17,6 +17,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 
@@ -53,6 +54,7 @@ class TestHiveServer(RMFTestCase):
                               keytab = UnknownConfigurationMock(),
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
+                              bin_dir = '/usr/bin',
                               kinit_path_local = "/usr/bin/kinit"
     )
 
@@ -64,6 +66,7 @@ class TestHiveServer(RMFTestCase):
                               keytab = UnknownConfigurationMock(),
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
+                              bin_dir = '/usr/bin',
                               kinit_path_local = "/usr/bin/kinit"
     )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -72,6 +75,7 @@ class TestHiveServer(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
                               action = ['create']
                               )
 
@@ -80,6 +84,7 @@ class TestHiveServer(RMFTestCase):
                               owner='tez',
                               dest_dir='/apps/tez/',
                               kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
 
@@ -88,11 +93,14 @@ class TestHiveServer(RMFTestCase):
                               owner='tez',
                               dest_dir='/apps/tez/lib/',
                               kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
 
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
+                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+                                             'HADOOP_HOME' : '/usr'},
                               user = 'hive'
     )
 
@@ -144,6 +152,8 @@ class TestHiveServer(RMFTestCase):
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
+                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+                                             'HADOOP_HOME' : '/usr'},
                               user = 'hive'
     )
 
@@ -180,6 +190,7 @@ class TestHiveServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         mode = 0777,
         owner = 'hive',
+        bin_dir = '/usr/bin',
         action = ['create_delayed'],
     )
     self.assertResourceCalled('HdfsDirectory', '/user/hive',
@@ -190,6 +201,7 @@ class TestHiveServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         mode = 0700,
         owner = 'hive',
+        bin_dir = '/usr/bin',
         action = ['create_delayed'],
     )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -198,6 +210,7 @@ class TestHiveServer(RMFTestCase):
         conf_dir = '/etc/hadoop/conf',
         hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
+        bin_dir = '/usr/bin',
         action = ['create'],
     )
     self.assertResourceCalled('Directory', '/etc/hive/conf.server',
@@ -295,6 +308,7 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
@@ -331,6 +345,7 @@ class TestHiveServer(RMFTestCase):
         conf_dir = '/etc/hadoop/conf',
         hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
+        bin_dir = '/usr/bin',
         mode = 0777,
         owner = 'hive',
         action = ['create_delayed'],
@@ -342,6 +357,7 @@ class TestHiveServer(RMFTestCase):
         hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
         mode = 0700,
+        bin_dir = '/usr/bin',
         owner = 'hive',
         action = ['create_delayed'],
     )
@@ -350,6 +366,7 @@ class TestHiveServer(RMFTestCase):
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         conf_dir = '/etc/hadoop/conf',
         hdfs_user = 'hdfs',
+        bin_dir = '/usr/bin',
         kinit_path_local = '/usr/bin/kinit',
         action = ['create'],
     )
@@ -448,6 +465,7 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index eefb6b9..4ae9ad2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -17,6 +17,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 import datetime, sys, socket
@@ -42,6 +43,7 @@ class TestServiceCheck(RMFTestCase):
                         path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
                         tries = 3,
                         user = 'ambari-qa',
+                        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
@@ -50,6 +52,7 @@ class TestServiceCheck(RMFTestCase):
                         conf_dir = '/etc/hadoop/conf',
                         keytab=UnknownConfigurationMock(),
                         kinit_path_local='/usr/bin/kinit',
+                        bin_dir = '/usr/lib/hive/bin',
                         security_enabled=False
     )
     self.assertResourceCalled('Execute', ' /tmp/hcatSmoke.sh hcatsmoke cleanup',
@@ -57,6 +60,7 @@ class TestServiceCheck(RMFTestCase):
                         path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
                         tries = 3,
                         user = 'ambari-qa',
+                        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
     self.assertNoMoreResources()
@@ -78,6 +82,7 @@ class TestServiceCheck(RMFTestCase):
                         path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
                         tries = 3,
                         user = 'ambari-qa',
+                        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
@@ -87,6 +92,7 @@ class TestServiceCheck(RMFTestCase):
                         keytab='/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local='/usr/bin/kinit',
                         security_enabled=True,
+                        bin_dir = '/usr/lib/hive/bin',
                         principal='hdfs'
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa;  /tmp/hcatSmoke.sh hcatsmoke cleanup',
@@ -94,6 +100,7 @@ class TestServiceCheck(RMFTestCase):
                         path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
                         tries = 3,
                         user = 'ambari-qa',
+                        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index 1b98e45..5740587 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -45,8 +45,8 @@ class TestOozieServer(RMFTestCase):
         ignore_failures = True,
         user = 'oozie',
         )
-    self.assertResourceCalled('Execute', ' hadoop dfs -put /usr/lib/oozie/share /user/oozie ; hadoop dfs -chmod -R 755 /user/oozie/share',
-        not_if = " hadoop dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'",
+    self.assertResourceCalled('Execute', ' hadoop --config /etc/hadoop/conf dfs -put /usr/lib/oozie/share /user/oozie ; hadoop --config /etc/hadoop/conf dfs -chmod -R 755 /user/oozie/share',
+        not_if = " hadoop --config /etc/hadoop/conf dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'",
         user = 'oozie',
         )
     self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-start.sh',
@@ -91,8 +91,8 @@ class TestOozieServer(RMFTestCase):
                               ignore_failures = True,
                               user = 'oozie',
                               )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/oozie.service.keytab oozie/c6402.ambari.apache.org@EXAMPLE.COM; hadoop dfs -put /usr/lib/oozie/share /user/oozie ; hadoop dfs -chmod -R 755 /user/oozie/share',
-                              not_if = "/usr/bin/kinit -kt /etc/security/keytabs/oozie.service.keytab oozie/c6402.ambari.apache.org@EXAMPLE.COM; hadoop dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'",
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/oozie.service.keytab oozie/c6402.ambari.apache.org@EXAMPLE.COM; hadoop --config /etc/hadoop/conf dfs -put /usr/lib/oozie/share /user/oozie ; hadoop --config /etc/hadoop/conf dfs -chmod -R 755 /user/oozie/share',
+                              not_if = "/usr/bin/kinit -kt /etc/security/keytabs/oozie.service.keytab oozie/c6402.ambari.apache.org@EXAMPLE.COM; hadoop --config /etc/hadoop/conf dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'",
                               user = 'oozie',
                               )
     self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-start.sh',
@@ -122,6 +122,7 @@ class TestOozieServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0775,
                               owner = 'oozie',
+                              bin_dir = '/usr/bin',
                               action = ['create'],
     )
     self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
@@ -224,6 +225,7 @@ class TestOozieServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0775,
                               owner = 'oozie',
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
     self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
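
The oozie assertions now expect every hadoop invocation to carry an explicit --config <conf_dir> flag, which is what lets the versioned-RPM layouts targeted by AMBARI-7257 point commands at a relocated configuration directory. A small sketch of the command construction (the conf dir and dfs subcommands are taken from the diff; the helper itself is illustrative):

    HADOOP_CONF_DIR = "/etc/hadoop/conf"  # default layout; versioned RPMs may relocate it

    def hadoop_cmd(subcommand, conf_dir=HADOOP_CONF_DIR):
        """Prefix a hadoop subcommand with an explicit --config flag."""
        return "hadoop --config %s %s" % (conf_dir, subcommand)

    put_share = "%s ; %s" % (
        hadoop_cmd("dfs -put /usr/lib/oozie/share /user/oozie"),
        hadoop_cmd("dfs -chmod -R 755 /user/oozie/share"),
    )
    # -> 'hadoop --config /etc/hadoop/conf dfs -put ... ;
    #     hadoop --config /etc/hadoop/conf dfs -chmod -R 755 /user/oozie/share'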

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
index 1e1ad24..2521636 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
@@ -28,13 +28,14 @@ class TestPigServiceCheck(RMFTestCase):
                        command = "service_check",
                        config_file="default.json"
     )
-    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop dfs -put /etc/passwd passwd ',
+    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
       try_sleep = 5,
       tries = 3,
       user = 'ambari-qa',
       conf_dir = '/etc/hadoop/conf',
       security_enabled = False,
       keytab = UnknownConfigurationMock(),
+      bin_dir = '/usr/bin',
       kinit_path_local = '/usr/bin/kinit'
     )
        
@@ -44,7 +45,7 @@ class TestPigServiceCheck(RMFTestCase):
     )
        
     self.assertResourceCalled('Execute', 'pig /tmp/pigSmoke.sh',
-      path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+      path = [':/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
       tries = 3,
       user = 'ambari-qa',
       try_sleep = 5,
@@ -52,6 +53,7 @@ class TestPigServiceCheck(RMFTestCase):
        
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
       user = 'ambari-qa',
+      bin_dir = '/usr/bin',
       conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -63,13 +65,14 @@ class TestPigServiceCheck(RMFTestCase):
                        config_file="secured.json"
     )
     
-    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop dfs -put /etc/passwd passwd ',
+    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
       try_sleep = 5,
       tries = 3,
       user = 'ambari-qa',
       conf_dir = '/etc/hadoop/conf',
       security_enabled = True, 
       keytab = '/etc/security/keytabs/smokeuser.headless.keytab',
+      bin_dir = '/usr/bin',
       kinit_path_local = '/usr/bin/kinit'
     )
        
@@ -79,7 +82,7 @@ class TestPigServiceCheck(RMFTestCase):
     )
        
     self.assertResourceCalled('Execute', 'pig /tmp/pigSmoke.sh',
-      path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+      path = [':/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
       tries = 3,
       user = 'ambari-qa',
       try_sleep = 5,
@@ -87,6 +90,7 @@ class TestPigServiceCheck(RMFTestCase):
        
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
       user = 'ambari-qa',
+      bin_dir = '/usr/bin',
       conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py
index 0e96b66..bde2e86 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py
@@ -107,6 +107,7 @@ class TestWebHCatServer(RMFTestCase):
                               kinit_path_local = "/usr/bin/kinit",
                               mode = 0755,
                               owner = 'hcat',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/user/hcat',
@@ -117,6 +118,7 @@ class TestWebHCatServer(RMFTestCase):
                               kinit_path_local = "/usr/bin/kinit",
                               mode = 0755,
                               owner = 'hcat',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -125,6 +127,7 @@ class TestWebHCatServer(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
@@ -160,6 +163,7 @@ class TestWebHCatServer(RMFTestCase):
                               mode=0755,
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
     self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
@@ -167,6 +171,7 @@ class TestWebHCatServer(RMFTestCase):
                               mode=0755,
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
     self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
@@ -174,6 +179,7 @@ class TestWebHCatServer(RMFTestCase):
                               mode=0755,
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
 
@@ -186,6 +192,7 @@ class TestWebHCatServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0755,
                               owner = 'hcat',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/user/hcat',
@@ -196,6 +203,7 @@ class TestWebHCatServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0755,
                               owner = 'hcat',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -204,6 +212,7 @@ class TestWebHCatServer(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
@@ -243,6 +252,7 @@ class TestWebHCatServer(RMFTestCase):
                               mode=0755,
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
     self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
@@ -250,6 +260,7 @@ class TestWebHCatServer(RMFTestCase):
                               mode=0755,
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
     self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
@@ -257,5 +268,6 @@ class TestWebHCatServer(RMFTestCase):
                               mode=0755,
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
\ No newline at end of file
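
The hadoop_conf_dir argument threaded through these CopyFromLocal asserts reflects how the copy is ultimately shelled out: an optional kinit prefix, then a hadoop fs command pinned to an explicit conf dir (the same "--config" pinning the Pig smoke test above shows for ExecuteHadoop). A minimal sketch of that composition in Python; the helper name and exact flags are assumptions for illustration, not the Ambari resource itself:

    def copy_from_local_cmd(src, dest_dir, hadoop_conf_dir, kinnit_if_needed=''):
        # Optional kinit prefix (already ends with ';' when present), then a
        # hadoop fs command bound to the given conf dir.
        cmd = 'hadoop --config %s fs -copyFromLocal %s %s' % (
            hadoop_conf_dir, src, dest_dir)
        return (kinnit_if_needed + ' ' + cmd).strip()

    # e.g. the secured case asserted above:
    copy_from_local_cmd('/usr/share/HDP-webhcat/pig.tar.gz', '/apps/webhcat',
                        '/etc/hadoop/conf',
                        '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;')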

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 155e07d..738ffc1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -124,6 +124,7 @@ class TestHistoryServer(RMFTestCase):
                               group = 'hadoop',
                               action = ['create_delayed'],
                               mode = 0777,
+                              bin_dir = '/usr/bin'
                               )
     self.assertResourceCalled('HdfsDirectory', '/mapred',
                               security_enabled = False,
@@ -132,6 +133,7 @@ class TestHistoryServer(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               owner = 'mapred',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mapred/system',
@@ -141,6 +143,7 @@ class TestHistoryServer(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               owner = 'hdfs',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
@@ -152,6 +155,7 @@ class TestHistoryServer(RMFTestCase):
                               mode = 0777,
                               owner = 'mapred',
                               group = 'hadoop',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
@@ -163,6 +167,7 @@ class TestHistoryServer(RMFTestCase):
                               mode = 01777,
                               owner = 'mapred',
                               group = 'hadoop',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -171,6 +176,7 @@ class TestHistoryServer(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
@@ -314,6 +320,7 @@ class TestHistoryServer(RMFTestCase):
                               owner = 'yarn',
                               group = 'hadoop',
                               action = ['create_delayed'],
+                              bin_dir = '/usr/bin',
                               mode = 0777,
                               )
     self.assertResourceCalled('HdfsDirectory', '/mapred',
@@ -323,6 +330,7 @@ class TestHistoryServer(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               owner = 'mapred',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mapred/system',
@@ -332,6 +340,7 @@ class TestHistoryServer(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               owner = 'hdfs',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
@@ -343,6 +352,7 @@ class TestHistoryServer(RMFTestCase):
                               mode = 0777,
                               owner = 'mapred',
                               group = 'hadoop',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
@@ -354,6 +364,7 @@ class TestHistoryServer(RMFTestCase):
                               mode = 01777,
                               owner = 'mapred',
                               group = 'hadoop',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -362,6 +373,7 @@ class TestHistoryServer(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index 4723b0f..5f15d91 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -120,6 +120,7 @@ class TestNodeManager(RMFTestCase):
                               owner = 'yarn',
                               group = 'hadoop',
                               action = ['create_delayed'],
+                              bin_dir = '/usr/bin',
                               mode = 0777,
                               )
     self.assertResourceCalled('HdfsDirectory', '/mapred',
@@ -129,6 +130,7 @@ class TestNodeManager(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               owner = 'mapred',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mapred/system',
@@ -138,6 +140,7 @@ class TestNodeManager(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               owner = 'hdfs',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
@@ -149,6 +152,7 @@ class TestNodeManager(RMFTestCase):
                               mode = 0777,
                               owner = 'mapred',
                               group = 'hadoop',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
@@ -160,6 +164,7 @@ class TestNodeManager(RMFTestCase):
                               mode = 01777,
                               owner = 'mapred',
                               group = 'hadoop',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -168,6 +173,7 @@ class TestNodeManager(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
@@ -311,6 +317,7 @@ class TestNodeManager(RMFTestCase):
                               owner = 'yarn',
                               group = 'hadoop',
                               action = ['create_delayed'],
+                              bin_dir = '/usr/bin',
                               mode = 0777,
                               )
     self.assertResourceCalled('HdfsDirectory', '/mapred',
@@ -320,6 +327,7 @@ class TestNodeManager(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               owner = 'mapred',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mapred/system',
@@ -329,6 +337,7 @@ class TestNodeManager(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               owner = 'hdfs',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
@@ -338,6 +347,7 @@ class TestNodeManager(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0777,
+                              bin_dir = '/usr/bin',
                               owner = 'mapred',
                               group = 'hadoop',
                               action = ['create_delayed'],
@@ -349,6 +359,7 @@ class TestNodeManager(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 01777,
+                              bin_dir = '/usr/bin',
                               owner = 'mapred',
                               group = 'hadoop',
                               action = ['create_delayed'],
@@ -357,6 +368,7 @@ class TestNodeManager(RMFTestCase):
                               security_enabled = True,
                               keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                               conf_dir = '/etc/hadoop/conf',
+                              bin_dir = '/usr/bin',
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               action = ['create'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
index 7c4c01a..65ea0a5 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
@@ -17,6 +17,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 
@@ -41,8 +42,9 @@ class TestServiceCheck(RMFTestCase):
                           user = 'ambari-qa',
                           try_sleep = 5,
     )
-    self.assertResourceCalled('Execute', '/usr/bin/yarn node -list',
-                          user = 'ambari-qa',
+    self.assertResourceCalled('Execute', 'yarn --config /etc/hadoop/conf node -list',
+                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/bin"},
+                              user = 'ambari-qa',
     )
     self.assertNoMoreResources()
 
@@ -63,7 +65,8 @@ class TestServiceCheck(RMFTestCase):
                           user = 'ambari-qa',
                           try_sleep = 5,
     )
-    self.assertResourceCalled('Execute', '/usr/bin/yarn node -list',
+    self.assertResourceCalled('Execute', 'yarn --config /etc/hadoop/conf node -list',
+                          environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/bin"},
                           user = 'ambari-qa',
     )
     self.assertNoMoreResources()
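
The environment construction in this service check is deliberately additive: the agent's inherited PATH is extended with the Hadoop bin dir rather than replaced, so system tools and /usr/bin/yarn both stay resolvable. Reduced to a sketch (the function name is illustrative):

    import os

    def path_env(extra_bin_dir):
        # Append extra_bin_dir to the inherited PATH, mirroring the
        # environment= argument built for 'yarn ... node -list' above.
        return {'PATH': os.environ['PATH'] + os.pathsep + extra_bin_dir}

    env = path_env('/usr/bin')  # passed as environment= to Execute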

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 93ea2d1..2282dcc 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -94,6 +94,7 @@ class TestFalconServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0777,
                               owner = 'falcon',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -102,6 +103,7 @@ class TestFalconServer(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
     self.assertResourceCalled('Directory', '/hadoop/falcon',

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index 77909a7..47423ff 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -17,6 +17,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 
@@ -40,6 +41,8 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+                                             'HADOOP_HOME' : '/usr'},
                               user = 'hive'
     )
 
@@ -82,6 +85,8 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+                                             'HADOOP_HOME' : '/usr'},
                               user = 'hive'
     )
 
@@ -175,6 +180,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
@@ -279,6 +285,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',


[24/30] git commit: AMBARI-7214. Upgrade to Ambari 1.7.0 requires updating server.jdbc.database_name property in ambari.properties (alejandro)

Posted by jo...@apache.org.
AMBARI-7214. Upgrade to Ambari 1.7.0 requires updating server.jdbc.database_name property in ambari.properties (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bd28cd9e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bd28cd9e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bd28cd9e

Branch: refs/heads/branch-alerts-dev
Commit: bd28cd9e80dc6026e7c4dfa768b4f14318185644
Parents: 3cf2ee4
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Thu Sep 11 16:21:42 2014 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Fri Sep 12 10:22:50 2014 -0700

----------------------------------------------------------------------
 .../server/configuration/Configuration.java     |  1 -
 ambari-server/src/main/python/ambari-server.py  | 71 +++++++++++++++-----
 .../src/test/python/TestAmbariServer.py         | 52 +++++++++++---
 3 files changed, 98 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bd28cd9e/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index a3a07b0..78fd7b6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -106,7 +106,6 @@ public class Configuration {
   public static final String CLIENT_API_SSL_CRT_PASS_FILE_NAME_KEY = "client.api.ssl.cert_pass_file";
   public static final String CLIENT_API_SSL_CRT_PASS_KEY = "client.api.ssl.crt_pass";
   public static final String CLIENT_API_SSL_KEY_NAME_KEY = "client.api.ssl.key_name";
-  public static final String SERVER_DB_TYPE_KEY = "server.jdbc.database";       // E.g., oracle|mysql|postgres
   public static final String SERVER_DB_NAME_KEY = "server.jdbc.database_name";
   public static final String SERVER_DB_NAME_DEFAULT = "ambari";
   public static final String SERVER_JDBC_POSTGRES_SCHEMA_NAME = "server.jdbc.postgres.schema";

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd28cd9e/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index 35ba17b..50420b2 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -304,7 +304,7 @@ DATABASE_INDEX = 0
 PROMPT_DATABASE_OPTIONS = False
 USERNAME_PATTERN = "^[a-zA-Z_][a-zA-Z0-9_\-]*$"
 PASSWORD_PATTERN = "^[a-zA-Z0-9_-]*$"
-DATABASE_NAMES = ["postgres", "oracle", "mysql"]
+DATABASE_TYPES = ["postgres", "oracle", "mysql"]
 DATABASE_STORAGE_NAMES = ["Database", "Service", "Database"]
 DATABASE_PORTS = ["5432", "1521", "3306"]
 DATABASE_DRIVER_NAMES = ["org.postgresql.Driver", "oracle.jdbc.driver.OracleDriver", "com.mysql.jdbc.Driver"]
@@ -838,8 +838,6 @@ def restart_postgres():
   return 0, "", ""
 
 
-# todo: check if the scheme is already exist
-
 def write_property(key, value):
   conf_file = find_properties_file()
   properties = Properties()
@@ -1087,7 +1085,7 @@ def get_pass_file_path(conf_file):
 # Set database properties to default values
 def load_default_db_properties(args):
   args.persistence_type = 'local'
-  args.dbms = DATABASE_NAMES[DATABASE_INDEX]
+  args.dbms = DATABASE_TYPES[DATABASE_INDEX]
   args.database_host = "localhost"
   args.database_port = DATABASE_PORTS[DATABASE_INDEX]
   args.database_name = DEFAULT_DB_NAME
@@ -1141,7 +1139,7 @@ def prompt_db_properties(args):
       pass
 
       DATABASE_INDEX = args.database_index
-      args.dbms = DATABASE_NAMES[args.database_index]
+      args.dbms = DATABASE_TYPES[args.database_index]
 
       if args.persistence_type != 'local':
         args.database_host = get_validated_string_input(
@@ -1386,12 +1384,48 @@ def configure_database_password(showDefault=True):
   return password
 
 
-def check_database_name_property():
+def get_ambari_version(properties):
+  """
+  :param properties: Ambari properties
+  :return: Return a string of the ambari version. When comparing versions, please use "compare_versions" function.
+  """
+  version = None
+  try:
+    server_version_file_path = properties[SERVER_VERSION_FILE_PATH]
+    if server_version_file_path and os.path.exists(server_version_file_path):
+      with open(server_version_file_path, 'r') as file:
+        version = file.read().strip()
+  except:
+    print_error_msg("Error getting ambari version")
+  return version
+
+
+def check_database_name_property(args, upgrade=False):
+  """
+  :param upgrade: If Ambari is being upgraded.
+  :return:
+  """
   properties = get_ambari_properties()
   if properties == -1:
     print_error_msg("Error getting ambari properties")
     return -1
 
+  version = get_ambari_version(properties)
+  if upgrade and compare_versions(version, "1.7.0") >= 0:
+
+    expected_db_name = properties[JDBC_DATABASE_NAME_PROPERTY]
+    # The existing ambari config file is probably from an earlier version of Ambari, and needs to be transformed.
+    if expected_db_name is None or expected_db_name == "":
+      db_name = properties[JDBC_DATABASE_PROPERTY]
+
+      if db_name:
+        write_property(JDBC_DATABASE_NAME_PROPERTY, db_name)
+        remove_property(JDBC_DATABASE_PROPERTY)
+        properties = get_ambari_properties()
+      else:
+        err = "DB Name property not set in config file.\n" + SETUP_OR_UPGRADE_MSG
+        raise FatalException(-1, "Upgrade to version %s cannot transform config file." % str(version))
+
   dbname = properties[JDBC_DATABASE_NAME_PROPERTY]
   if dbname is None or dbname == "":
     err = "DB Name property not set in config file.\n" + SETUP_OR_UPGRADE_MSG
@@ -1523,7 +1557,7 @@ def parse_properties_file(args):
     args.database_port = properties[JDBC_PORT_PROPERTY]
     global DATABASE_INDEX
     try:
-      DATABASE_INDEX = DATABASE_NAMES.index(args.dbms)
+      DATABASE_INDEX = DATABASE_TYPES.index(args.dbms)
     except ValueError:
       pass
 
@@ -2297,7 +2331,7 @@ def reset(args):
     err = "Ambari Server 'reset' cancelled"
     raise FatalException(1, err)
 
-  check_database_name_property()
+  check_database_name_property(args)
   parse_properties_file(args)
 
   if args.persistence_type == "remote":
@@ -2370,7 +2404,7 @@ def start(args):
           "command as root, as sudo or as user \"{1}\"".format(current_user, ambari_user)
     raise FatalException(1, err)
 
-  check_database_name_property()
+  check_database_name_property(args)
   parse_properties_file(args)
 
   status, pid = is_server_runing()
@@ -2540,7 +2574,7 @@ def upgrade_stack(args, stack_id, repo_url=None, repo_url_os=None):
     err = 'Ambari-server upgradestack should be run with ' \
           'root-level privileges'
     raise FatalException(4, err)
-  check_database_name_property()
+  check_database_name_property(args)
 
   stack_name, stack_version = stack_id.split(STACK_NAME_VER_SEP)
   retcode = run_stack_upgrade(stack_name, stack_version, repo_url, repo_url_os)
@@ -2726,7 +2760,7 @@ def upgrade(args):
     raise FatalException(retcode, err)
 
   try:
-    check_database_name_property()
+    check_database_name_property(args, upgrade=True)
   except FatalException:
     properties = get_ambari_properties()
     if properties == -1:
@@ -4170,12 +4204,12 @@ def main():
     options.database_index = 0
     DATABASE_INDEX = 0
     pass
-  elif options.dbms is not None and options.dbms not in DATABASE_NAMES:
+  elif options.dbms is not None and options.dbms not in DATABASE_TYPES:
     parser.print_help()
     parser.error("Unsupported Database " + options.dbms)
   elif options.dbms is not None:
     options.dbms = options.dbms.lower()
-    DATABASE_INDEX = DATABASE_NAMES.index(options.dbms)
+    DATABASE_INDEX = DATABASE_TYPES.index(options.dbms)
 
   #correct port
   if options.database_port is not None:
@@ -4339,6 +4373,9 @@ class Properties(object):
       self.process_pair(key, value)
 
   def process_pair(self, key, value):
+    """
+    Adds or overrides the property with the given key.
+    """
     oldkey = key
     oldvalue = value
     keyparts = self.bspacere.split(key)
@@ -4407,7 +4444,9 @@ class Properties(object):
 
   def store(self, out, header=""):
     """ Write the properties list to the stream 'out' along
-    with the optional 'header' """
+    with the optional 'header'
+    This function will attempt to close the file handler once it's done.
+    """
     if out.mode[0] != 'w':
       raise ValueError, 'Steam should be opened in write mode!'
     try:
@@ -4420,9 +4459,11 @@ class Properties(object):
       for prop, val in self._origprops.items():
         if val is not None:
           out.write(''.join((prop, '=', val, '\n')))
-      out.close()
     except IOError:
       raise
+    finally:
+      if out:
+        out.close()
 
 if __name__ == "__main__":
   try:
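
The new upgrade branch in check_database_name_property amounts to a one-time key migration: when the 1.7.0 key is absent, the value is moved over from the legacy key and both changes are persisted. A minimal sketch of that migration, assuming a dict-like properties object and the write_property/remove_property helpers from this patch (signatures here are illustrative):

    JDBC_DATABASE_PROPERTY = 'server.jdbc.database'            # legacy key
    JDBC_DATABASE_NAME_PROPERTY = 'server.jdbc.database_name'  # key since 1.7.0

    def migrate_db_name_property(properties, write_property, remove_property):
        # Only transform configs written by an older Ambari: the new key is
        # missing but the legacy key still carries the database name.
        if properties.get(JDBC_DATABASE_NAME_PROPERTY):
            return
        db_name = properties.get(JDBC_DATABASE_PROPERTY)
        if not db_name:
            raise Exception('DB Name property not set in config file.')
        write_property(JDBC_DATABASE_NAME_PROPERTY, db_name)
        remove_property(JDBC_DATABASE_PROPERTY)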

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd28cd9e/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 53f0ba1..ebd691d 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -27,6 +27,7 @@ import stat
 import datetime
 import operator
 import json
+from optparse import OptionParser
 import platform
 import shutil
 from pwd import getpwnam
@@ -43,7 +44,7 @@ with patch("platform.linux_distribution", return_value = ('Suse','11','Final')):
 
 FatalException = ambari_server.FatalException
 NonFatalException = ambari_server.NonFatalException
-
+CURR_AMBARI_VERSION = "1.7.0"
 
 class TestAmbariServer(TestCase):
   def setUp(self):
@@ -3176,6 +3177,32 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     self.assertEquals(os_symlink_mock.call_args_list[0][0][1], os.path.join("somewhere","postgres-jdbc-driver.jar"))
 
 
+  @patch.object(ambari_server, "write_property")
+  @patch.object(ambari_server, "find_properties_file")
+  @patch.object(ambari_server, "is_root")
+  @patch.object(ambari_server, "get_ambari_version")
+  @patch.object(ambari_server, "get_ambari_properties")
+  def test_upgrade_from_161(self, get_ambari_properties_mock, get_ambari_version_mock, is_root_mock, find_properties_file_mock,
+                            write_property_mock):
+    args = MagicMock()
+    args.dbms = "postgres"
+    is_root_mock.return_value = True
+
+    # In Ambari 1.6.1, the DB name was actually stored in JDBC_DATABASE_PROPERTY, and the JDBC_DATABASE_NAME_PROPERTY
+    # property didn't exist. When upgrading to Ambari 1.7.0, the ambari.properties file should be transformed.
+    get_ambari_version_mock.return_value = "1.7.0"
+
+    properties = ambari_server.Properties()
+    properties.process_pair(ambari_server.JDBC_DATABASE_PROPERTY, "ambari")
+    get_ambari_properties_mock.return_value = properties
+
+    try:
+      ambari_server.upgrade(args)
+    except FatalException as fe:
+      self.fail("Did not expect failure: " + str(fe))
+    else:
+      self.assertTrue(write_property_mock.called)
+
 
   @patch("__builtin__.open")
   @patch("os.path.isfile")
@@ -3190,11 +3217,12 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(ambari_server, "run_schema_upgrade")
   @patch.object(ambari_server, "update_ambari_properties")
   @patch.object(ambari_server, "parse_properties_file")
+  @patch.object(ambari_server, "get_ambari_version")
   @patch.object(ambari_server, "is_root")
   @patch.object(ambari_server, "get_ambari_properties")
   @patch.object(ambari_server, "upgrade_local_repo")
   def test_upgrade(self, upgrade_local_repo_mock,
-                   get_ambari_properties_mock, is_root_mock,
+                   get_ambari_properties_mock, is_root_mock, get_ambari_version_mock,
                    parse_properties_file_mock,
                    update_ambari_properties_mock, run_schema_upgrade_mock,
                    read_ambari_user_mock, print_warning_msg_mock,
@@ -3204,10 +3232,10 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
 
     args = MagicMock()
     check_database_name_property_mock = MagicMock()
-
     update_ambari_properties_mock.return_value = 0
     run_schema_upgrade_mock.return_value = 0
     isfile_mock.return_value = False
+    get_ambari_version_mock.return_value = CURR_AMBARI_VERSION
 
     # Testing call under non-root
     is_root_mock.return_value = False
@@ -3262,7 +3290,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     p = MagicMock()
     get_ambari_properties_mock.reset_mock()
     get_ambari_properties_mock.return_value = p
-    p.__getitem__.side_effect = ["something", KeyError("test exception")]
+    p.__getitem__.side_effect = ["something", "something", KeyError("test exception")]
     fail = False
 
     try:
@@ -3271,9 +3299,9 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
       fail = True
     self.assertTrue(fail)
 
-    # test if some drivers are available in reources, and symlink available too
+    # test if some drivers are available in resources, and symlink available too
     p.reset_mock()
-    p.__getitem__.side_effect = ["something", "resources"]
+    p.__getitem__.side_effect = ["something", "something", "resources"]
     lexists_mock.return_value = True
     isfile_mock.side_effect = [True, False, False]
     ambari_server.upgrade(args)
@@ -3281,8 +3309,8 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     self.assertEquals(os_remove_mock.call_count, 1)
     self.assertEquals(os_remove_mock.call_args[0][0], os.path.join("resources", "oracle-jdbc-driver.jar"))
     self.assertEquals(os_symlink_mock.call_count, 1)
-    self.assertEquals(os_symlink_mock.call_args[0][0], os.path.join("resources","ojdbc6.jar"))
-    self.assertEquals(os_symlink_mock.call_args[0][1], os.path.join("resources","oracle-jdbc-driver.jar"))
+    self.assertEquals(os_symlink_mock.call_args[0][0], os.path.join("resources", "ojdbc6.jar"))
+    self.assertEquals(os_symlink_mock.call_args[0][1], os.path.join("resources", "oracle-jdbc-driver.jar"))
 
 
   def test_print_info_msg(self):
@@ -4898,10 +4926,14 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
 
   @patch.object(ambari_server, "get_ambari_properties")
   def test_check_database_name_property(self, get_ambari_properties_mock):
+    parser = OptionParser()
+    parser.add_option('--database', default=None, help="Database to use embedded|oracle|mysql|postgres", dest="dbms")
+    args = parser.parse_args()
+
     # negative case
     get_ambari_properties_mock.return_value = {ambari_server.JDBC_DATABASE_NAME_PROPERTY: ""}
     try:
-      result = ambari_server.check_database_name_property()
+      result = ambari_server.check_database_name_property(args)
       self.fail("Should fail with exception")
     except FatalException as e:
       self.assertTrue('DB Name property not set in config file.' in e.reason)
@@ -4911,7 +4943,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     get_ambari_properties_mock.reset_mock()
     get_ambari_properties_mock.return_value = {ambari_server.JDBC_DATABASE_NAME_PROPERTY: dbname}
     try:
-      result = ambari_server.check_database_name_property()
+      result = ambari_server.check_database_name_property(args)
     except FatalException:
       self.fail("Setup should be successful")
 


[08/30] git commit: AMBARI-7180 ambari build failure caused by ApacheDSContainer's working directory conflict (jun aoki via alejandro)

Posted by jo...@apache.org.
AMBARI-7180 ambari build failure caused by ApacheDSContainer's working directory conflict (jun aoki via alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/91a01f1c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/91a01f1c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/91a01f1c

Branch: refs/heads/branch-alerts-dev
Commit: 91a01f1cd548ff35883c5a4f894a524d21a835ea
Parents: 9ed5e3b
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Thu Sep 11 15:32:43 2014 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Thu Sep 11 15:32:43 2014 -0700

----------------------------------------------------------------------
 ...mbariLdapAuthenticationProviderBaseTest.java | 50 ++++++++++++++++++++
 ...uthenticationProviderForDNWithSpaceTest.java |  4 +-
 .../AmbariLdapAuthenticationProviderTest.java   |  4 +-
 3 files changed, 54 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/91a01f1c/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderBaseTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderBaseTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderBaseTest.java
new file mode 100644
index 0000000..2a323f8
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderBaseTest.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authorization;
+
+import org.apache.commons.io.FileUtils;
+import org.easymock.EasyMockSupport;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class AmbariLdapAuthenticationProviderBaseTest extends EasyMockSupport {
+
+  private static final Log logger = LogFactory.getLog(AmbariLdapAuthenticationProviderBaseTest.class);
+
+  protected static void createCleanApacheDSContainerWorkDir() throws IOException{
+    // Set ApacheDsContainer's work dir under the current folder (Jenkins' job workspace) instead of the default /tmp/apacheds-spring-security. See AMBARI-7180
+    SimpleDateFormat sdf = new SimpleDateFormat("HHmmss");
+    String timestamp = sdf.format(new Date());
+    final String workParent = new File(".").getAbsolutePath() + File.separator + "target" + File.separator + timestamp;
+    new File(workParent).mkdirs();
+    // The folder structure looks like {job-root}/target/{timestamp}/apacheds-spring-security
+    final String apacheDSWorkDir = workParent + File.separator + "apacheds-spring-security";
+    FileUtils.deleteDirectory(new File(apacheDSWorkDir));
+    System.setProperty("apacheDSWorkDir", apacheDSWorkDir );
+    logger.info("System property apacheDSWorkDir was set to " + apacheDSWorkDir);
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/91a01f1c/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderForDNWithSpaceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderForDNWithSpaceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderForDNWithSpaceTest.java
index ae8054b..c04af86 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderForDNWithSpaceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderForDNWithSpaceTest.java
@@ -24,7 +24,6 @@ import com.google.inject.persist.PersistService;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.dao.UserDAO;
-import org.apache.ambari.server.orm.entities.UserEntity;
 import org.apache.ambari.server.security.ClientSecurityType;
 import org.junit.*;
 import org.springframework.security.authentication.BadCredentialsException;
@@ -34,7 +33,7 @@ import org.springframework.security.ldap.server.ApacheDSContainer;
 
 import static org.junit.Assert.*;
 
-public class AmbariLdapAuthenticationProviderForDNWithSpaceTest {
+public class AmbariLdapAuthenticationProviderForDNWithSpaceTest extends AmbariLdapAuthenticationProviderBaseTest{
   private static ApacheDSContainer apacheDSContainer;
   private static Injector injector;
 
@@ -47,6 +46,7 @@ public class AmbariLdapAuthenticationProviderForDNWithSpaceTest {
 
   @BeforeClass
   public static void beforeClass() throws Exception{
+    createCleanApacheDSContainerWorkDir();
     apacheDSContainer = new ApacheDSContainer("dc=ambari,dc=the apache,dc=org", "classpath:/users_for_dn_with_space.ldif");
     apacheDSContainer.setPort(33389);
     apacheDSContainer.afterPropertiesSet();

http://git-wip-us.apache.org/repos/asf/ambari/blob/91a01f1c/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderTest.java
index 2a2d3dd..6bc692c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProviderTest.java
@@ -27,7 +27,6 @@ import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.dao.UserDAO;
 import org.apache.ambari.server.security.ClientSecurityType;
-import org.easymock.EasyMockSupport;
 import org.easymock.IAnswer;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -43,7 +42,7 @@ import static org.easymock.EasyMock.*;
 
 import static org.junit.Assert.*;
 
-public class AmbariLdapAuthenticationProviderTest extends EasyMockSupport {
+public class AmbariLdapAuthenticationProviderTest extends AmbariLdapAuthenticationProviderBaseTest {
 
   private static ApacheDSContainer apacheDSContainer;
   private static Injector injector;
@@ -57,6 +56,7 @@ public class AmbariLdapAuthenticationProviderTest extends EasyMockSupport {
 
   @BeforeClass
   public static void beforeClass() throws Exception{
+    createCleanApacheDSContainerWorkDir();
     apacheDSContainer = new ApacheDSContainer("dc=ambari,dc=apache,dc=org", "classpath:/users.ldif");
     apacheDSContainer.setPort(33389);
     apacheDSContainer.afterPropertiesSet();
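
createCleanApacheDSContainerWorkDir() sidesteps the shared /tmp/apacheds-spring-security directory by giving every run its own timestamped workspace under target/, then wiping any leftover contents before the container starts. The same pattern in a short Python sketch (the directory layout mirrors the Java helper; names are illustrative):

    import os
    import shutil
    import time

    def create_clean_work_dir(root='target'):
        # target/{HHMMSS}/apacheds-spring-security, freshly emptied, so
        # concurrent builds on one host cannot trip over each other's state.
        stamp = time.strftime('%H%M%S')
        work_dir = os.path.join(os.path.abspath(root), stamp,
                                'apacheds-spring-security')
        if os.path.exists(work_dir):
            shutil.rmtree(work_dir)
        os.makedirs(work_dir)
        return work_dir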


[13/30] git commit: AMBARI-7269. StackAdvisorResourceProvider ignores configurations with array type values

Posted by jo...@apache.org.
AMBARI-7269. StackAdvisorResourceProvider ignores configurations with array type values


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/59765552
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/59765552
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/59765552

Branch: refs/heads/branch-alerts-dev
Commit: 59765552c48ec83ad2a54077dc00e69aecd85e8b
Parents: 10dd42e
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Thu Sep 11 17:07:47 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Thu Sep 11 18:19:13 2014 -0700

----------------------------------------------------------------------
 .../internal/StackAdvisorResourceProvider.java  |  6 +-
 .../StackAdvisorResourceProviderTest.java       | 76 ++++++++++++++++++++
 .../resources/ui/app/styles/application.less    |  8 +++
 3 files changed, 87 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/59765552/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
index 40b423e..6ae557d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
@@ -176,9 +176,9 @@ public abstract class StackAdvisorResourceProvider extends ReadOnlyResourceProvi
     return map;
   }
 
-  private static final String CONFIGURATIONS_PROPERTY_ID = "recommendations/blueprint/configurations/";
+  protected static final String CONFIGURATIONS_PROPERTY_ID = "recommendations/blueprint/configurations/";
 
-  private Map<String, Map<String, Map<String, String>>> calculateConfigurations(Request request) {
+  protected Map<String, Map<String, Map<String, String>>> calculateConfigurations(Request request) {
     Map<String, Map<String, Map<String, String>>> configurations = new HashMap<String, Map<String, Map<String, String>>>();
     Map<String, Object> properties = request.getProperties().iterator().next();
     for (String property : properties.keySet()) {
@@ -202,7 +202,7 @@ public abstract class StackAdvisorResourceProvider extends ReadOnlyResourceProvi
             siteMap.put(propertiesProperty, propertiesMap);
           }
 
-          String value = (String) properties.get(property);
+          String value = properties.get(property).toString();
           propertiesMap.put(propertyName, value);
         } catch (Exception e) {
           LOG.debug(String.format("Error handling configuration property, name = %s", property), e);

http://git-wip-us.apache.org/repos/asf/ambari/blob/59765552/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
new file mode 100644
index 0000000..8c5337b
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.apache.ambari.server.controller.internal.StackAdvisorResourceProvider.CONFIGURATIONS_PROPERTY_ID;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+
+public class StackAdvisorResourceProviderTest {
+
+  @Test
+  public void testCalculateConfigurations() throws Exception {
+
+    Map<Resource.Type, String> keyPropertyIds = Collections.emptyMap();
+    Set<String> propertyIds = Collections.emptySet();
+    AmbariManagementController ambariManagementController = mock(AmbariManagementController.class);
+    RecommendationResourceProvider provider = new RecommendationResourceProvider(propertyIds,
+        keyPropertyIds, ambariManagementController);
+
+    Request request = mock(Request.class);
+    Set<Map<String, Object>> propertiesSet = new HashSet<Map<String, Object>>();
+    Map<String, Object> propertiesMap = new HashMap<String, Object>();
+    propertiesMap.put(CONFIGURATIONS_PROPERTY_ID + "site/properties/string_prop", "string");
+    List<Object> array = new ArrayList<Object>();
+    array.add("array1");
+    array.add("array2");
+    propertiesMap.put(CONFIGURATIONS_PROPERTY_ID + "site/properties/array_prop", array);
+    propertiesSet.add(propertiesMap);
+
+    doReturn(propertiesSet).when(request).getProperties();
+
+    Map<String, Map<String, Map<String, String>>> calculatedConfigurations = provider.calculateConfigurations(request);
+
+    assertNotNull(calculatedConfigurations);
+    assertEquals(1, calculatedConfigurations.size());
+    Map<String, Map<String, String>> site = calculatedConfigurations.get("site");
+    assertNotNull(site);
+    assertEquals(1, site.size());
+    Map<String, String> properties = site.get("properties");
+    assertNotNull(properties);
+    assertEquals(2, properties.size());
+    assertEquals("string", properties.get("string_prop"));
+    assertEquals("[array1, array2]", properties.get("array_prop"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/59765552/contrib/views/slider/src/main/resources/ui/app/styles/application.less
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/styles/application.less b/contrib/views/slider/src/main/resources/ui/app/styles/application.less
index f3c4df9..f2d6260 100644
--- a/contrib/views/slider/src/main/resources/ui/app/styles/application.less
+++ b/contrib/views/slider/src/main/resources/ui/app/styles/application.less
@@ -636,6 +636,14 @@ a {
       padding-right: 5px;
       resize: none;
     }
+    textarea[disabled] {
+      cursor: not-allowed;
+      background-color: #eeeeee;
+    }
+    input[disabled] {
+      cursor: not-allowed;
+      background-color: #eeeeee;
+    }
   }
 }
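
The substantive fix in StackAdvisorResourceProvider is the single cast change: properties.get(property) returns a String for scalar configs but a List for array-typed ones, so the old (String) cast threw, and the catch block logged at debug level and dropped the property. Calling toString() keeps both kinds (the new test pins the list rendering to "[array1, array2]", Java's List.toString() form). The same defensive idea phrased in Python (illustrative):

    def normalize_config_value(value):
        # Pass strings through unchanged; render anything else (e.g. a list)
        # to a string instead of failing on a type cast and losing the value.
        return value if isinstance(value, str) else str(value)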
 


[26/30] git commit: AMBARI-7274 - Detailed response in 'view cluster information' under Server API documentation (Saurabh Jain via tbeerbower)

Posted by jo...@apache.org.
AMBARI-7274 - Detailed response in 'view cluster information' under Server API documentation (Saurabh Jain via tbeerbower)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fc569f19
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fc569f19
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fc569f19

Branch: refs/heads/branch-alerts-dev
Commit: fc569f191f5a44994e68d4e54aae0ff80192935d
Parents: 093ed17
Author: tbeerbower <tb...@hortonworks.com>
Authored: Fri Sep 12 15:16:58 2014 -0400
Committer: tbeerbower <tb...@hortonworks.com>
Committed: Fri Sep 12 15:16:58 2014 -0400

----------------------------------------------------------------------
 ambari-server/docs/api/v1/clusters-cluster.md | 284 ++++++++++++++-------
 1 file changed, 188 insertions(+), 96 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fc569f19/ambari-server/docs/api/v1/clusters-cluster.md
----------------------------------------------------------------------
diff --git a/ambari-server/docs/api/v1/clusters-cluster.md b/ambari-server/docs/api/v1/clusters-cluster.md
index 0a57e43..8da5b29 100644
--- a/ambari-server/docs/api/v1/clusters-cluster.md
+++ b/ambari-server/docs/api/v1/clusters-cluster.md
@@ -60,105 +60,197 @@ Returns information for the specified cluster identified by ":name"
 
 **Example**
 
-Get information for the cluster "c1".
+Get information for the cluster "cluster001".
 
-    GET /clusters/c1
+    GET /clusters/cluster001
     
     200 OK
     {
-    	"href" : "http://your.ambari.server/api/v1/clusters/c1",
-      	"Clusters" : {
-        	"cluster_name" : "c1",
-        	"cluster_id" : 1,
-        	"version" : "HDP-1.2.0"
-      	},
-      	"services" : [
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/NAGIOS",
-        		"ServiceInfo" : {
-          			"cluster_name" : "c1",
-          			"service_name" : "NAGIOS"
-          		}
-        	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/HCATALOG",
-        		"ServiceInfo" : {
-          			"cluster_name" : "c1",
-          			"service_name" : "HCATALOG"
-          		}
-        	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/PIG",
-        		"ServiceInfo" : {
-          			"cluster_name" : "c1",
-         			"service_name" : "PIG"
-          		}
-        	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/MAPREDUCE",
-        		"ServiceInfo" : {
-          			"cluster_name" : "c1",
-          			"service_name" : "MAPREDUCE"
-          		}
-        	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/GANGLIA",
-        		"ServiceInfo" : {
-          			"cluster_name" : "c1",
-          			"service_name" : "GANGLIA"
-          		}
-        	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/HIVE",
-        		"ServiceInfo" : {
-          			"cluster_name" : "c1",
-          			"service_name" : "HIVE"
-          		}
-        	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/HDFS",
-        		"ServiceInfo" : {
-          			"cluster_name" : "MyIE9",
-          			"service_name" : "HDFS"
-          		}
-        	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/ZOOKEEPER",
-        		"ServiceInfo" : {
-          			"cluster_name" : "c1",
-         	 		"service_name" : "ZOOKEEPER"
-          		}
-        	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/HBASE",
-        		"ServiceInfo" : {
-          			"cluster_name" : "c1",
-          			"service_name" : "HBASE"
-          		}
-        	},
-        	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/OOZIE",
-        		"ServiceInfo" : {
-          			"cluster_name" : "c1",
-          			"service_name" : "OOZIE"
-          		}
-        	} 
+    	"href" : "http://your.ambari.server/api/v1/clusters/cluster001",
+    	"Clusters" : {
+    		"cluster_id" : 9,
+    		"cluster_name" : "cluster001",
+    		"health_report" : {
+    			"Host/stale_config" : 1,
+    			"Host/maintenance_state" : 0,
+    			"Host/host_state/HEALTHY" : 3,
+    			"Host/host_state/UNHEALTHY" : 0,
+    			"Host/host_state/HEARTBEAT_LOST" : 0,
+    			"Host/host_state/INIT" : 0,
+    			"Host/host_status/HEALTHY" : 3,
+    			"Host/host_status/UNHEALTHY" : 0,
+    			"Host/host_status/UNKNOWN" : 0,
+    			"Host/host_status/ALERT" : 0
+    		},
+    		"provisioning_state" : "INIT",
+    		"total_hosts" : 3,
+    		"version" : "HDP-2.0",
+    		"desired_configs" : {
+    			"capacity-scheduler" : {
+    				"user" : "admin",
+    				"tag" : "version1408514705943"
+    			},
+    			"core-site" : {
+    				"user" : "admin",
+    				"tag" : "version1409806913314"
+    			},
+    			"global" : {
+    				"user" : "admin",
+    				"tag" : "version1409806913314"
+    			},
+    			"hdfs-log4j" : {
+    				"user" : "admin",
+    				"tag" : "version1"
+    			},
+    			"hdfs-site" : {
+    				"user" : "admin",
+    				"tag" : "version1407908591996"
+    			},
+    			"mapred-site" : {
+    				"user" : "admin",
+    				"tag" : "version1408514705943"
+    			},
+    			"mapreduce2-log4j" : {
+    				"user" : "admin",
+    				"tag" : "version1408514705943"
+    			},
+    			"yarn-log4j" : {
+    				"user" : "admin",
+    				"tag" : "version1408514705943"
+    			},
+    			"yarn-site" : {
+    				"user" : "admin",
+    				"tag" : "version1408514705943"
+    			},
+    			"zoo.cfg" : {
+    				"user" : "admin",
+    				"tag" : "version1"
+    			},
+    			"zookeeper-log4j" : {
+    				"user" : "admin",
+    				"tag" : "version1"
+    			}
+    		}
+    	},
+    	"alerts" : {
+    		"summary" : {
+    			"CRITICAL" : 1,
+    			"OK" : 2,
+    			"PASSIVE" : 0,
+    			"WARNING" : 0
+    		}
+    	},
+    	"requests" : [
+    		{
+    			"href" : "http://your.ambari.server/api/v1/clusters/cluster001/requests/304",
+    			"Requests" : {
+    			"cluster_name" : "cluster001",
+    			"id" : 304
+    			}
+    		},
+    		{
+    			"href" : "http://your.ambari.server/api/v1/clusters/cluster001/requests/305",
+    			"Requests" : {
+    			"cluster_name" : "cluster001",
+    			"id" : 305
+    			}
+    		}
+    		],
+    	"services" : [
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/services/GANGLIA",
+    		"ServiceInfo" : {
+    		"cluster_name" : "cluster001",
+    		"service_name" : "GANGLIA"
+    		}
+    	},
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/services/HDFS",
+    		"ServiceInfo" : {
+    		"cluster_name" : "cluster001",
+    		"service_name" : "HDFS"
+    		}
+    	},
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/services/MAPREDUCE2",
+    		"ServiceInfo" : {
+    		"cluster_name" : "cluster001",
+    		"service_name" : "MAPREDUCE2"
+    		}
+    	},
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/services/ZOOKEEPER",
+    		"ServiceInfo" : {
+    		"cluster_name" : "cluster001",
+    		"service_name" : "ZOOKEEPER"
+    		}
+    	}
     	],
-      "hosts" : [
-        {
-          "href" : "http://your.ambari.server/api/v1/clusters/c1/hosts/some.host",
-          "Hosts" : {
-              "cluster_name" : "c1",
-              "host_name" : "some.host"
-          }
-        },
-        {
-          "href" : "http://your.ambari.server/api/v1/clusters/c1/hosts/another.host",
-          "Hosts" : {
-              "cluster_name" : "c1",
-              "host_name" : "another.host"
-          }
-        }
-      ]
+    	"config_groups" : [
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/config_groups/2",
+    		"ConfigGroup" : {
+    		 "cluster_name" : "cluster001",
+    		  "id" : 2
+    		}
+    	}
+    	],
+    	"workflows" : [ ],
+    	"hosts" : [
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/hosts/host1.domain.com",
+    		"Hosts" : {
+    		  "cluster_name" : "cluster001",
+    		  "host_name" : "host1.domain.com"
+    		}
+    	},
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/hosts/host2.domain.com",
+    		"Hosts" : {
+    		  "cluster_name" : "cluster001",
+    		  "host_name" : "host2.domain.com"
+    		}
+    	},
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/hosts/host3.domain.com",
+    		"Hosts" : {
+    		  "cluster_name" : "cluster001",
+    		  "host_name" : "host3.domain.com"
+    		}
+    	}
+    	],
+    	"configurations" : [
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/configurations?type=core-site&tag=version1",
+    		"tag" : "version1",
+    		"type" : "core-site",
+    		"Config" : {
+    		  "cluster_name" : "cluster001"
+    		}
+    	},
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/configurations?type=global&tag=version1",
+    		"tag" : "version1",
+    		"type" : "global",
+    		"Config" : {
+    		  "cluster_name" : "cluster001"
+    		}
+    	},
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/configurations?type=hdfs-site&tag=version1",
+    		"tag" : "version1",
+    		"type" : "hdfs-site",
+    		"Config" : {
+    		  "cluster_name" : "cluster001"
+    		}
+    	},
+    	{
+    		"href" : "http://your.ambari.server/api/v1/clusters/cluster001/configurations?type=zoo.cfg&tag=version1",
+    		"tag" : "version1",
+    		"type" : "zoo.cfg",
+    		"Config" : {
+    		  "cluster_name" : "cluster001"
+    		}
+    	}
+    	]
     }
-
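
For reference, a minimal Python sketch of issuing the GET above; the server address and admin/admin credentials are placeholders, not values from this patch:

    import requests

    AMBARI = "http://your.ambari.server:8080"
    resp = requests.get(
        AMBARI + "/api/v1/clusters/cluster001",
        auth=("admin", "admin"),              # assumed credentials
        headers={"X-Requested-By": "ambari"},
    )
    resp.raise_for_status()
    cluster = resp.json()
    print(cluster["Clusters"]["version"], cluster["Clusters"]["total_hosts"])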


[12/30] git commit: AMBARI-7273. Admin View: cluster rename controls look a bit out of place. (xi wang)

Posted by jo...@apache.org.
AMBARI-7273. Admin View: cluster rename controls look a bit out of place. (xi wang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/10dd42ea
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/10dd42ea
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/10dd42ea

Branch: refs/heads/branch-alerts-dev
Commit: 10dd42eab0e6ee83e4c1181a3126681863c558e5
Parents: e71d4fa
Author: Xi Wang <xi...@apache.org>
Authored: Thu Sep 11 17:26:51 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Thu Sep 11 17:26:51 2014 -0700

----------------------------------------------------------------------
 .../resources/ui/admin-web/app/views/leftNavbar.html | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/10dd42ea/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
index 8b57754..b1ab544 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
@@ -38,17 +38,16 @@
                   class="form-control input-sm"
                   tooltip="Only alpha-numeric characters."
                   tooltip-trigger="focus">
-
-              <button
-                  type="submit"
-                  class="btn btn-success btn-xs"
-                  ng-class="{'disabled': editClusterNameForm.newClusterName.$invalid || editCluster.name == cluster.Clusters.cluster_name}">
-                <i class="glyphicon glyphicon-ok"></i>
-              </button>
               <button ng-click="toggleEditName()"
-                      class="btn btn-danger btn-xs">
+                      class="btn btn-xs">
                 <i class="glyphicon glyphicon-remove"></i>
               </button>
+              <button
+                    type="submit"
+                    class="btn btn-primary btn-xs"
+                    ng-class="{'disabled': editClusterNameForm.newClusterName.$invalid || editCluster.name == cluster.Clusters.cluster_name}">
+                <i class="glyphicon glyphicon-ok"></i>
+              </button>
             </div>
           </form>
 


[21/30] git commit: AMBARI-7283 Oozie config backward-incompatible change in HDP2.2 (dsen)

Posted by jo...@apache.org.
AMBARI-7283 Oozie config backward-incompatible change in HDP2.2 (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6aeb3be2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6aeb3be2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6aeb3be2

Branch: refs/heads/branch-alerts-dev
Commit: 6aeb3be27c4f0f9abc473cd388e954f41fe5172a
Parents: ae48015
Author: Dmytro Sen <ds...@hortonworks.com>
Authored: Fri Sep 12 17:56:55 2014 +0300
Committer: Dmytro Sen <ds...@hortonworks.com>
Committed: Fri Sep 12 17:56:55 2014 +0300

----------------------------------------------------------------------
 .../HDP/2.2/services/OOZIE/configuration/oozie-site.xml  | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6aeb3be2/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
index d39f542..8e8be78 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
@@ -32,7 +32,14 @@
     </description>
   </property>
 
-
-
+  <property>
+    <name>oozie.service.coord.check.maximum.frequency</name>
+    <value>false</value>
+    <description>
+      When true, Oozie will reject any coordinators with a frequency faster than 5 minutes.  It is not recommended to disable
+      this check or submit coordinators with frequencies faster than 5 minutes: doing so can cause unintended behavior and
+      additional system stress.
+    </description>
+  </property>
 
 </configuration>
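
As a usage note, the new property can also be set through Ambari's REST API by posting a new desired_config. The sketch below uses placeholder host, cluster name, and credentials; a real client would first GET the current oozie-site properties and merge them in, since a desired_config replaces the whole property set (that merge is elided here):

    import json, time
    import requests

    AMBARI = "http://your.ambari.server:8080"
    payload = {
        "Clusters": {
            "desired_config": {
                "type": "oozie-site",
                "tag": "version%d" % int(time.time() * 1000),  # tags must be unique
                "properties": {
                    "oozie.service.coord.check.maximum.frequency": "false"
                }
            }
        }
    }
    requests.put(
        AMBARI + "/api/v1/clusters/cluster001",
        data=json.dumps(payload),
        auth=("admin", "admin"),              # assumed credentials
        headers={"X-Requested-By": "ambari"},
    ).raise_for_status()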


[14/30] git commit: AMBARI-7276 Property: dfs.namenode.http-address has final flag - 'true' in downloaded hdfs-site.xml in HA cluster. (ababiichuk)

Posted by jo...@apache.org.
AMBARI-7276 Property: dfs.namenode.http-address has final flag - 'true' in downloaded hdfs-site.xml in HA cluster. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/18f427dd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/18f427dd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/18f427dd

Branch: refs/heads/branch-alerts-dev
Commit: 18f427dd12c016a11a81e80913b15ab40865c48f
Parents: 5976555
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Fri Sep 12 14:05:54 2014 +0300
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Fri Sep 12 14:05:54 2014 +0300

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   1 +
 .../highAvailability/progress_controller.js     |  15 +-
 ambari-web/app/utils/config.js                  |  14 +-
 .../progress_controller_test.js                 | 144 +++++++++++++++++++
 4 files changed, 163 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/18f427dd/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index c76bd52..0c5f875 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -46,6 +46,7 @@ var files = ['test/init_model_test',
   'test/controllers/main/admin/repositories_test',
   'test/controllers/main/admin/serviceAccounts_controller_test',
   'test/controllers/main/admin/highAvailability_controller_test',
+  'test/controllers/main/admin/highAvailability/progress_controller_test',
   'test/controllers/main/admin/security_test',
   'test/controllers/main/admin/security/disable_test',
   'test/controllers/main/admin/security/security_progress_controller_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/18f427dd/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js
index 67ec07d..242a6b6 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js
@@ -412,8 +412,8 @@ App.HighAvailabilityProgressPageController = App.HighAvailabilityWizardControlle
    *
    * @param siteNames Array
    */
-  reconfigureSites: function(siteNames,data) {
-    var tagName =  'version' + (new Date).getTime();
+  reconfigureSites: function(siteNames, data) {
+    var tagName = App.get('testMode') ? 'version1' : 'version' + (new Date).getTime();
     var componentName;
     switch (this.get('content.controllerName')) {
       case 'rMHighAvailabilityWizardController':
@@ -422,13 +422,18 @@ App.HighAvailabilityProgressPageController = App.HighAvailabilityWizardControlle
       default:
         componentName =  'NAMENODE';
     }
-    return siteNames.map(function(_siteName){
-      return {
+    return siteNames.map(function(_siteName) {
+      var config = data.items.findProperty('type', _siteName);
+      var configToSave = {
         type: _siteName,
         tag: tagName,
-        properties: data.items.findProperty('type', _siteName).properties,
+        properties: config && config.properties,
         service_config_version_note: Em.I18n.t('admin.highAvailability.step4.save.configuration.note').format(App.format.role(componentName))
       }
+      if (config && config.properties_attributes) {
+        configToSave.properties_attributes = config.properties_attributes;
+      }
+      return configToSave;
     });
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/18f427dd/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index 16089d9..35ca305 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -591,14 +591,16 @@ App.config = Em.Object.create({
   addAdvancedConfigs: function (serviceConfigs, advancedConfigs, serviceName) {
     var miscConfigs = serviceConfigs.filterProperty('serviceName', 'MISC');
     var configsToVerifying = (serviceName) ? serviceConfigs.filterProperty('serviceName', serviceName).concat(miscConfigs) : serviceConfigs.slice();
-    var definedConfigs = (serviceName) ? this.get('preDefinedServiceConfigs').findProperty('serviceName', serviceName).get('configs') : [];
+    var definedService = this.get('preDefinedServiceConfigs').findProperty('serviceName', serviceName);
+    if (definedService) {
+      var definedConfigs = (serviceName) ? definedService.get('configs') : [];
 
-    if (definedConfigs.length) {
-      advancedConfigs = advancedConfigs.filter(function(property) {
-        return !(definedConfigs.someProperty('name', property.name) && !serviceConfigs.someProperty('name', property.name));
-      }, this);
+      if (definedConfigs.length) {
+        advancedConfigs = advancedConfigs.filter(function(property) {
+          return !(definedConfigs.someProperty('name', property.name) && !serviceConfigs.someProperty('name', property.name));
+        }, this);
+      }
     }
-
     advancedConfigs.forEach(function (_config) {
       var configType = this.getConfigTagFromFileName(_config.filename);
       var configCategory = 'Advanced ' + configType;

http://git-wip-us.apache.org/repos/asf/ambari/blob/18f427dd/ambari-web/test/controllers/main/admin/highAvailability/progress_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/highAvailability/progress_controller_test.js b/ambari-web/test/controllers/main/admin/highAvailability/progress_controller_test.js
new file mode 100644
index 0000000..ff54663
--- /dev/null
+++ b/ambari-web/test/controllers/main/admin/highAvailability/progress_controller_test.js
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+var App = require('app');
+require('controllers/main/admin/highAvailability_controller');
+require('models/host_component');
+require('models/host');
+require('utils/ajax/ajax');
+
+describe('App.HighAvailabilityProgressPageController', function () {
+
+  var controller = App.HighAvailabilityProgressPageController.create();
+
+  describe('#reconfigureSites()', function () {
+    var tests = [
+      {
+        content: {
+          controllerName: "rMHighAvailabilityWizardController"
+        },
+        siteNames: ["site1", "site2"],
+        data: {
+          items: [
+            {
+              type: "site1",
+              properties: {
+                site1_property1: "site1_property1_value",
+                site1_property2: "site1_property2_value"
+              },
+              properties_attributes: {
+                final: {
+                  site1_property1: "true"
+                }
+              }
+            },
+            {
+              type: "site2",
+              properties: {
+                site2_property1: "site2_property1_value",
+                site2_property2: "site2_property2_value"
+              }
+            },
+            {
+              type: "site3",
+              properties: {
+                site3_property: "site3_property_value"
+              }
+            }
+          ]
+        },
+        result: [
+          {
+            type: "site1",
+            tag: "version1",
+            properties: {
+              site1_property1: "site1_property1_value",
+              site1_property2: "site1_property2_value"
+            },
+            service_config_version_note: Em.I18n.t('admin.highAvailability.step4.save.configuration.note').format("ResourceManager"),
+            properties_attributes: {
+              final: {
+                site1_property1: "true"
+              }
+            }
+          },
+          {
+            type: "site2",
+            tag: "version1",
+            properties: {
+              site2_property1: "site2_property1_value",
+              site2_property2: "site2_property2_value"
+            },
+            service_config_version_note: Em.I18n.t('admin.highAvailability.step4.save.configuration.note').format("ResourceManager")
+          }
+        ]
+      },
+      {
+        content: {
+          controllerName: "anyOther"
+        },
+        siteNames: ["site1"],
+        data: {
+          items: [
+            {
+              type: "site1",
+              properties: {
+                site1_property1: "site1_property1_value",
+                site1_property2: "site1_property2_value"
+              },
+              properties_attributes: {
+                final: {
+                  site1_property1: "true"
+                }
+              }
+            }
+          ]
+        },
+        result: [
+          {
+            type: "site1",
+            tag: "version1",
+            properties: {
+              site1_property1: "site1_property1_value",
+              site1_property2: "site1_property2_value"
+            },
+            service_config_version_note: Em.I18n.t('admin.highAvailability.step4.save.configuration.note').format("NameNode"),
+            properties_attributes: {
+              final: {
+                site1_property1: "true"
+              }
+            }
+          }
+        ]
+      }];
+    beforeEach(function() {
+      App.set('testMode', true);
+    });
+    afterEach(function() {
+      App.set('testMode', false);
+    });
+    it("reconfigures configs after HA", function() {
+      tests.forEach(function(t) {
+        controller.set('content', t.content);
+        expect(controller.reconfigureSites(t.siteNames, t.data)).to.eql(t.result);
+      });
+    });
+  });
+
+});
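
For clarity, a plain-Python sketch of the payload shape reconfigureSites() now builds: one entry per site, with properties_attributes (e.g. final flags) carried through only when the source config has them. Names mirror the test fixtures above:

    def reconfigure_sites(site_names, items, tag, note):
        configs = []
        for site in site_names:
            # find the fetched config for this site, if any
            config = next((i for i in items if i["type"] == site), None)
            entry = {
                "type": site,
                "tag": tag,
                "properties": config and config["properties"],
                "service_config_version_note": note,
            }
            if config and "properties_attributes" in config:
                entry["properties_attributes"] = config["properties_attributes"]
            configs.append(entry)
        return configs

    print(reconfigure_sites(["site1"],
                            [{"type": "site1", "properties": {"a": "1"}}],
                            "version1", "note"))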


[22/30] git commit: Merge remote-tracking branch 'origin/trunk' into origin-trunk

Posted by jo...@apache.org.
Merge remote-tracking branch 'origin/trunk' into origin-trunk


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2a7fb78e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2a7fb78e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2a7fb78e

Branch: refs/heads/branch-alerts-dev
Commit: 2a7fb78ec0e9e9a0e0220a474f2821bd997c98ad
Parents: 1b52db2 6aeb3be
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Fri Sep 12 18:08:09 2014 +0300
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Fri Sep 12 18:08:09 2014 +0300

----------------------------------------------------------------------
 .../internal/ServiceConfigVersionResourceProvider.java   |  2 +-
 .../HDP/2.2/services/OOZIE/configuration/oozie-site.xml  | 11 +++++++++--
 2 files changed, 10 insertions(+), 3 deletions(-)
----------------------------------------------------------------------



[03/30] AMBARI-7257 Use Versioned RPMS for HDP 2.2 stack and make it plugabable to be able to reuse the scripts for HDP 2.* (dsen)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
index 6aff622..83e40c6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
@@ -78,7 +78,7 @@ def yarn(name = None):
   )
 
   XmlConfig("core-site.xml",
-            conf_dir=params.config_dir,
+            conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['core-site'],
             configuration_attributes=params.config['configuration_attributes']['core-site'],
             owner=params.hdfs_user,
@@ -87,7 +87,7 @@ def yarn(name = None):
   )
 
   XmlConfig("mapred-site.xml",
-            conf_dir=params.config_dir,
+            conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['mapred-site'],
             configuration_attributes=params.config['configuration_attributes']['mapred-site'],
             owner=params.yarn_user,
@@ -96,7 +96,7 @@ def yarn(name = None):
   )
 
   XmlConfig("yarn-site.xml",
-            conf_dir=params.config_dir,
+            conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['yarn-site'],
             configuration_attributes=params.config['configuration_attributes']['yarn-site'],
             owner=params.yarn_user,
@@ -105,7 +105,7 @@ def yarn(name = None):
   )
 
   XmlConfig("capacity-scheduler.xml",
-            conf_dir=params.config_dir,
+            conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['capacity-scheduler'],
             configuration_attributes=params.config['configuration_attributes']['capacity-scheduler'],
             owner=params.yarn_user,
@@ -140,7 +140,7 @@ def yarn(name = None):
        content=Template('mapreduce.conf.j2')
   )
 
-  File(format("{config_dir}/yarn-env.sh"),
+  File(format("{hadoop_conf_dir}/yarn-env.sh"),
        owner=params.yarn_user,
        group=params.user_group,
        mode=0755,
@@ -154,7 +154,7 @@ def yarn(name = None):
          mode=06050
     )
 
-    File(format("{config_dir}/container-executor.cfg"),
+    File(format("{hadoop_conf_dir}/container-executor.cfg"),
          group=params.user_group,
          mode=0644,
          content=Template('container-executor.cfg.j2')
@@ -168,7 +168,7 @@ def yarn(name = None):
     tc_mode = None
     tc_owner = params.hdfs_user
 
-  File(format("{config_dir}/mapred-env.sh"),
+  File(format("{hadoop_conf_dir}/mapred-env.sh"),
        owner=tc_owner,
        content=InlineTemplate(params.mapred_env_sh_template)
   )
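
For context, XmlConfig materializes a Hadoop *-site.xml from the configurations dict into the given conf dir; the fix above only changes which dir that is. The snippet below is a plain-Python stand-in for illustration, not the resource_management implementation:

    import os
    from xml.sax.saxutils import escape

    def xml_config(filename, conf_dir, configurations):
        # Render a minimal <configuration> document from a properties dict.
        lines = ["<configuration>"]
        for name, value in sorted(configurations.items()):
            lines += ["  <property>",
                      "    <name>%s</name>" % escape(str(name)),
                      "    <value>%s</value>" % escape(str(value)),
                      "  </property>"]
        lines.append("</configuration>")
        with open(os.path.join(conf_dir, filename), "w") as f:
            f.write("\n".join(lines) + "\n")

    xml_config("core-site.xml", "/tmp", {"fs.defaultFS": "hdfs://nn:8020"})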

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
index 2dc3792..6016b99 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
@@ -26,15 +26,24 @@ import status_params
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-config_dir = "/etc/zookeeper/conf"
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  config_dir = format('/usr/hdp/{rpm_version}/etc/zookeeper/conf')
+  zk_bin = format('/usr/hdp/{rpm_version}/zookeeper/bin')
+  smoke_script = format('/usr/hdp/{rpm_version}/zookeeper/bin/zkCli.sh')
+else:
+  config_dir = "/etc/zookeeper/conf"
+  zk_bin = '/usr/lib/zookeeper/bin'
+  smoke_script = "/usr/lib/zookeeper/bin/zkCli.sh"
+
 zk_user =  config['configurations']['zookeeper-env']['zk_user']
 hostname = config['hostname']
-zk_bin = '/usr/lib/zookeeper/bin'
 user_group = config['configurations']['cluster-env']['user_group']
 zk_env_sh_template = config['configurations']['zookeeper-env']['content']
 
-smoke_script = "/usr/lib/zookeeper/bin/zkCli.sh"
-
 zk_log_dir = config['configurations']['zookeeper-env']['zk_log_dir']
 zk_data_dir = config['configurations']['zookeeper-env']['zk_data_dir']
 zk_pid_dir = status_params.zk_pid_dir
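
The pattern above — choose versioned locations under /usr/hdp/<rpm_version>/ when rpm_version is set in hadoop-env, otherwise the classic paths — recurs in every stack script touched by this patch. A minimal sketch:

    def zookeeper_paths(rpm_version=None):
        # Versioned RPM layout when rpm_version is present, legacy layout otherwise.
        if rpm_version is not None:
            base = "/usr/hdp/%s" % rpm_version
            return {"config_dir": base + "/etc/zookeeper/conf",
                    "zk_bin": base + "/zookeeper/bin",
                    "smoke_script": base + "/zookeeper/bin/zkCli.sh"}
        return {"config_dir": "/etc/zookeeper/conf",
                "zk_bin": "/usr/lib/zookeeper/bin",
                "smoke_script": "/usr/lib/zookeeper/bin/zkCli.sh"}

    print(zookeeper_paths("2.9.9.9-98")["zk_bin"])  # /usr/hdp/2.9.9.9-98/zookeeper/bin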

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
index 7a61c8a..79bdef3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
@@ -23,6 +23,17 @@ from status_params import *
 
 config = Script.get_config()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_bin_dir = "/usr/bin"
+
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 falcon_user = config['configurations']['falcon-env']['falcon_user']
 smoke_user =  config['configurations']['cluster-env']['smokeuser']
@@ -53,7 +64,6 @@ flacon_apps_dir = '/apps/falcon'
 #for create_hdfs_directory
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -67,5 +77,6 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )
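
The functools.partial call above pre-binds the cluster-wide arguments (keytab, kinit path, and now bin_dir) so call sites only pass directory-specific ones. A self-contained sketch with a hypothetical stand-in for the real resource:

    import functools

    def hdfs_directory(path, owner=None, hdfs_user=None, bin_dir=None, **kwargs):
        # Stand-in for the real HdfsDirectory resource; just reports its inputs.
        print("mkdir %s (owner=%s, as=%s, bin=%s)" % (path, owner, hdfs_user, bin_dir))

    HdfsDirectory = functools.partial(hdfs_directory,
                                      hdfs_user="hdfs",
                                      bin_dir="/usr/hdp/2.9.9.9-98/hadoop/bin")

    HdfsDirectory("/apps/falcon", owner="falcon")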

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
index 19668c7..7115de4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
@@ -37,8 +37,7 @@ nimbus_host = config['configurations']['storm-site']['nimbus.host']
 rest_api_port = "8745"
 rest_api_admin_port = "8746"
 rest_api_conf_file = format("{conf_dir}/config.yaml")
-rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
-java_home = config['hostLevelParams']['java_home']
+rest_lib_dir = default("/configurations/storm-env/rest_lib_dir","/usr/lib/storm/contrib/storm-rest")
 storm_env_sh_template = config['configurations']['storm-env']['content']
 
 if 'ganglia_server_host' in config['clusterHostInfo'] and \
@@ -48,7 +47,7 @@ if 'ganglia_server_host' in config['clusterHostInfo'] and \
   ganglia_report_interval = 60
 else:
   ganglia_installed = False
-  
+
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
 if security_enabled:
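
The default() helper used above looks up a '/'-separated path in the nested config and falls back when any segment is missing. A plain-Python sketch of that behavior (not the resource_management implementation):

    def default(config, path, fallback):
        node = config
        for key in path.strip("/").split("/"):
            if not isinstance(node, dict) or key not in node:
                return fallback
            node = node[key]
        return node

    cfg = {"configurations": {"storm-env": {}}}
    print(default(cfg, "/configurations/storm-env/rest_lib_dir",
                  "/usr/lib/storm/contrib/storm-rest"))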

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/metainfo.xml
new file mode 100644
index 0000000..0be6cb6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/metainfo.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+    <versions>
+	  <active>true</active>
+    </versions>
+    <extends>2.1</extends>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/repos/repoinfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/repos/repoinfo.xml
new file mode 100644
index 0000000..c99f92a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/repos/repoinfo.xml
@@ -0,0 +1,82 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<reposinfo>
+  <os type="redhat6">
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.9.9.9-98</baseurl>
+      <repoid>HDP-2.9.9.9-98</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0</baseurl>
+      <repoid>HDP-2.2.0.0</repoid>
+      <reponame>HDP-2.2</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.17/repos/centos6</baseurl>
+      <repoid>HDP-UTILS-1.1.0.17</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+  <os type="redhat5">
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/BUILDS/2.9.9.9-98</baseurl>
+      <repoid>HDP-2.9.9.9-98</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0</baseurl>
+      <repoid>HDP-2.2.0.0</repoid>
+      <reponame>HDP-2.2</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.17/repos/centos5</baseurl>
+      <repoid>HDP-UTILS-1.1.0.17</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+  <os type="suse11">
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.9.9.9-98</baseurl>
+      <repoid>HDP-2.9.9.9-98</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/updates/2.2.0.0</baseurl>
+      <repoid>HDP-2.2.0.0</repoid>
+      <reponame>HDP-2.2</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.17/repos/suse11</baseurl>
+      <repoid>HDP-UTILS-1.1.0.17</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+  <os type="debian12">
+    <repo>
+      <baseurl>REPLACE_WITH_UBUNTU12_URL</baseurl>
+      <repoid>HDP-2.1</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://dev.hortonworks.com.s3.amazonaws.com/HDP-UTILS-1.1.0.19/repos/ubuntu12</baseurl>
+      <repoid>HDP-UTILS-1.1.0.19</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+</reposinfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.2/role_command_order.json
new file mode 100644
index 0000000..a6f3e07
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/role_command_order.json
@@ -0,0 +1,88 @@
+{
+  "_comment" : "Record format:",
+  "_comment" : "blockedRole-blockedCommand: [blockerRole1-blockerCommand1, blockerRole2-blockerCommand2, ...]",
+  "general_deps" : {
+    "_comment" : "dependencies for all cases",
+    "NAGIOS_SERVER-INSTALL" : ["HIVE_CLIENT-INSTALL", "HCAT-INSTALL",
+        "MAPREDUCE_CLIENT-INSTALL", "OOZIE_CLIENT-INSTALL"],
+    "NIMBUS-START" : ["ZOOKEEPER_SERVER-START", "NODEMANAGER-START", "RESOURCEMANAGER-START"],
+    "SUPERVISOR-START" : ["NIMBUS-START"],
+    "STORM_UI_SERVER-START" : ["NIMBUS-START"],
+    "DRPC_SERVER-START" : ["NIMBUS-START"],
+    "STORM_REST_API-START" : ["NIMBUS-START", "STORM_UI_SERVER-START", "SUPERVISOR-START", "DRPC_SERVER-START"],
+    "HBASE_MASTER-START": ["ZOOKEEPER_SERVER-START"],
+    "HBASE_REGIONSERVER-START": ["HBASE_MASTER-START"],
+    "OOZIE_SERVER-START": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
+    "WEBHCAT_SERVER-START": ["NODEMANAGER-START", "HIVE_SERVER-START"],
+    "HIVE_METASTORE-START": ["MYSQL_SERVER-START"],
+    "HIVE_SERVER-START": ["NODEMANAGER-START", "MYSQL_SERVER-START"],
+    "HUE_SERVER-START": ["HIVE_SERVER-START", "HCAT-START", "OOZIE_SERVER-START"],
+    "FLUME_HANDLER-START": ["OOZIE_SERVER-START"],
+    "FALCON_SERVER-START": ["NAMENODE-START", "DATANODE-START", "OOZIE_SERVER-START"],
+    "NAGIOS_SERVER-START": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START",
+        "GANGLIA_SERVER-START", "GANGLIA_MONITOR-START", "HCAT-START",
+        "HIVE_SERVER-START", "HIVE_METASTORE-START", "HUE_SERVER-START",
+        "NODEMANAGER-START", "RESOURCEMANAGER-START", "ZOOKEEPER_SERVER-START",
+        "MYSQL_SERVER-START", "OOZIE_SERVER-START", "PIG-START", "SQOOP-START",
+        "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
+    "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
+    "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
+    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
+    "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
+    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "PIG_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
+    "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
+    "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],
+    "ZOOKEEPER_QUORUM_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],
+    "STORM_SERVICE_CHECK-SERVICE_CHECK": ["NIMBUS-START", "SUPERVISOR-START", "STORM_UI_SERVER-START",
+        "DRPC_SERVER-START"],
+    "FLUME_SERVICE_CHECK-SERVICE_CHECK": ["FLUME_HANDLER-START"],
+    "FALCON_SERVICE_CHECK-SERVICE_CHECK": ["FALCON_SERVER-START"],
+    "ZOOKEEPER_SERVER-STOP" : ["HBASE_MASTER-STOP", "HBASE_REGIONSERVER-STOP"],
+    "HBASE_MASTER-STOP": ["HBASE_REGIONSERVER-STOP"],
+    "NIMBUS-STOP" : ["SUPERVISOR-STOP", "STORM_UI_SERVER-STOP", "DRPC_SERVER-STOP"]
+  },
+  "_comment" : "GLUSTERFS-specific dependencies",
+  "optional_glusterfs": {
+    "HBASE_MASTER-START": ["PEERSTATUS-START"],
+    "GLUSTERFS_SERVICE_CHECK-SERVICE_CHECK": ["PEERSTATUS-START"]
+  },
+  "_comment" : "Dependencies that are used when GLUSTERFS is not present in cluster",
+  "optional_no_glusterfs": {
+    "SECONDARY_NAMENODE-START": ["NAMENODE-START"],
+    "RESOURCEMANAGER-START": ["NAMENODE-START", "DATANODE-START"],
+    "NODEMANAGER-START": ["NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START"],
+    "HISTORYSERVER-START": ["NAMENODE-START", "DATANODE-START"],
+    "HBASE_MASTER-START": ["NAMENODE-START", "DATANODE-START"],
+    "APP_TIMELINE_SERVER-START": ["NAMENODE-START", "DATANODE-START"],
+    "FALCON_SERVER-START": ["NAMENODE-START", "DATANODE-START"],
+    "FALCON_SERVICE_CHECK-SERVICE_CHECK": ["FALCON_SERVER-START"],
+    "HIVE_SERVER-START": ["DATANODE-START"],
+    "WEBHCAT_SERVER-START": ["DATANODE-START"],
+    "NAGIOS_SERVER-START": ["NAMENODE-START", "SECONDARY_NAMENODE-START",
+        "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START", "HISTORYSERVER-START"],
+    "HDFS_SERVICE_CHECK-SERVICE_CHECK": ["NAMENODE-START", "DATANODE-START",
+        "SECONDARY_NAMENODE-START"],
+    "MAPREDUCE2_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START",
+        "RESOURCEMANAGER-START", "HISTORYSERVER-START", "YARN_SERVICE_CHECK-SERVICE_CHECK"],
+    "YARN_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
+    "RESOURCEMANAGER_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START"],
+    "PIG_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
+    "NAMENODE-STOP": ["RESOURCEMANAGER-STOP", "NODEMANAGER-STOP",
+        "HISTORYSERVER-STOP", "HBASE_MASTER-STOP", "FALCON_SERVER-STOP"],
+    "DATANODE-STOP": ["RESOURCEMANAGER-STOP", "NODEMANAGER-STOP",
+        "HISTORYSERVER-STOP", "HBASE_MASTER-STOP", "FALCON_SERVER-STOP"]
+  },
+  "_comment" : "Dependencies that are used in HA NameNode cluster",
+  "namenode_optional_ha": {
+    "NAMENODE-START": ["ZKFC-START", "JOURNALNODE-START", "ZOOKEEPER_SERVER-START"],
+    "ZKFC-START": ["ZOOKEEPER_SERVER-START"],
+    "NAGIOS_SERVER-START": ["ZKFC-START", "JOURNALNODE-START"]
+  },
+  "_comment" : "Dependencies that are used in ResourceManager HA cluster",
+  "resourcemanager_optional_ha" : {
+    "RESOURCEMANAGER-START": ["ZOOKEEPER_SERVER-START"]
+  }
+}
+
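
To make the blocked/blocker record format concrete, here is a small sketch that answers "what must run before X?" against an embedded excerpt of the file (keys named _comment are inline documentation and are skipped):

    import json

    ORDER_JSON = """
    {
      "general_deps": {
        "_comment": "blockedRole-blockedCommand: [blockerRole-blockerCommand, ...]",
        "HBASE_MASTER-START": ["ZOOKEEPER_SERVER-START"],
        "HBASE_REGIONSERVER-START": ["HBASE_MASTER-START"]
      }
    }
    """

    def blockers(order, role_command):
        deps = []
        for section in order.values():
            if not isinstance(section, dict):  # skip top-level "_comment" strings
                continue
            for blocked, blocking in section.items():
                if blocked.startswith("_comment"):
                    continue  # documentation, not a dependency
                if blocked == role_command:
                    deps.extend(blocking)
        return deps

    order = json.loads(ORDER_JSON)
    print(blockers(order, "HBASE_REGIONSERVER-START"))  # ['HBASE_MASTER-START']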

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
new file mode 100644
index 0000000..4a46139
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>FALCON</name>
+      <displayName>Falcon</displayName>
+      <comment>Data management and processing platform</comment>
+      <version>0.6.0.2.2.0.0</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
new file mode 100644
index 0000000..6b702c8
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>FLUME</name>
+      <displayName>Flume</displayName>
+      <comment>A distributed service for collecting, aggregating, and moving large amounts of log data</comment>
+      <version>1.5.0.1.2.9.9.9</version>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>flume_2_9_9_9_98</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
new file mode 100644
index 0000000..52cd10d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HBASE</name>
+      <displayName>HBase</displayName>
+      <comment>Non-relational distributed database and centralized service for configuration management &amp;
+        synchronization
+      </comment>
+      <version>0.98.4.2.9.9.9</version>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>hbase_2_9_9_9_98</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
new file mode 100644
index 0000000..3213506
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <property>
+    <name>rpm_version</name>
+    <value>2.9.9.9-98</value>
+    <description>Hadoop RPM version</description>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
new file mode 100644
index 0000000..4f46cb7
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>dfs.hosts.exclude</name>
+    <value>/usr/hdp/2.9.9.9-98/etc/hadoop/conf/dfs.exclude</value>
+    <description>Names a file that contains a list of hosts that are
+      not permitted to connect to the namenode.  The full pathname of the
+      file must be specified.  If the value is empty, no hosts are
+      excluded.</description>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
new file mode 100644
index 0000000..b520a34
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
@@ -0,0 +1,68 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HDFS</name>
+      <displayName>HDFS</displayName>
+      <comment>Apache Hadoop Distributed File System</comment>
+      <version>2.6.0.2.9.9.9</version>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>hadoop_2_9_9_9_98</name>
+            </package>
+            <package>
+              <name>hadoop-lzo</name>
+            </package>
+          </packages>
+        </osSpecific>
+        
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>snappy</name>
+            </package>
+            <package>
+              <name>snappy-devel</name>
+            </package>
+            <package>
+              <name>lzo</name>
+            </package>
+            <package>
+              <name>hadoop-lzo-native</name>
+            </package>
+            <package>
+              <name>hadoop_2_9_9_9_98-libhdfs</name>
+            </package>
+            <package>
+              <name>ambari-log4j</name>
+            </package>
+          </packages>
+        </osSpecific>
+            
+      </osSpecifics>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
new file mode 100644
index 0000000..28567a7
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HIVE</name>
+      <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
+      <version>0.14.0.2.9.9.9</version>
+    </service>
+
+    <service>
+      <name>HCATALOG</name>
+      <comment>This is a comment for the HCATALOG service</comment>
+      <version>0.14.0.2.9.9.9</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>hive_2_9_9_9_98-hcatalog</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+    </service>
+
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
new file mode 100644
index 0000000..d39f542
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+        
+       http://www.apache.org/licenses/LICENSE-2.0
+  
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>oozie.service.HadoopAccessorService.hadoop.configurations</name>
+    <value>*=/usr/hdp/2.9.9.9-98/etc/hadoop/conf</value>
+    <description>
+      Comma separated AUTHORITY=HADOOP_CONF_DIR, where AUTHORITY is the HOST:PORT of
+      the Hadoop service (JobTracker, HDFS). The wildcard '*' configuration is
+      used when there is no exact match for an authority. The HADOOP_CONF_DIR contains
+      the relevant Hadoop *-site.xml files. If the path is relative, it is looked up within
+      the Oozie configuration directory; the path can also be absolute (i.e. pointing
+      to Hadoop client conf/ directories in the local filesystem).
+    </description>
+  </property>
+
+
+
+
+</configuration>
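
A minimal Python sketch of the AUTHORITY=HADOOP_CONF_DIR lookup described in the
property above; the nn1 authority and its conf dir are hypothetical, while the
'*' entry is the stack default from the patch:

    def resolve_conf_dir(prop_value, authority):
        # Parse 'AUTHORITY=HADOOP_CONF_DIR,...' and fall back to the '*'
        # entry when there is no exact match for the requested authority.
        mapping = {}
        for entry in prop_value.split(','):
            auth, _, conf_dir = entry.strip().partition('=')
            mapping[auth] = conf_dir
        return mapping.get(authority, mapping.get('*'))

    value = ('nn1.example.com:8020=/etc/hadoop/conf-nn1,'
             '*=/usr/hdp/2.9.9.9-98/etc/hadoop/conf')
    print(resolve_conf_dir(value, 'nn1.example.com:8020'))  # /etc/hadoop/conf-nn1
    print(resolve_conf_dir(value, 'other:8020'))            # the '*' fallback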

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
new file mode 100644
index 0000000..5c77061
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>OOZIE</name>
+      <comment>System for workflow coordination and execution of Apache Hadoop jobs.  This also includes the installation of the optional Oozie Web Console which relies on and will install the &lt;a target="_blank" href="http://www.sencha.com/legal/open-source-faq/"&gt;ExtJS&lt;/a&gt; Library.
+      </comment>
+      <version>4.1.0.2.2.0.0</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
new file mode 100644
index 0000000..335993f
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>PIG</name>
+      <displayName>Pig</displayName>
+      <comment>Scripting platform for analyzing large datasets</comment>
+      <version>0.14.0.2.9.9.9</version>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>pig_2_9_9_9_98</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
new file mode 100644
index 0000000..f644d74
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>SQOOP</name>
+      <comment>Tool for transferring bulk data between Apache Hadoop and
+        structured data stores such as relational databases
+      </comment>
+      <version>1.4.5.2.2</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml
new file mode 100644
index 0000000..6b2b550
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <property>
+    <name>rest_lib_dir</name>
+    <value>/usr/lib/storm/external/storm-rest</value>
+    <description></description>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
new file mode 100644
index 0000000..396af4a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="true">
+
+
+  <property>
+    <name>nimbus.childopts</name>
+    <value>-Xmx1024m -Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf -javaagent:/usr/lib/storm/external/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8649,wireformat31x=true,mode=multicast,config=/usr/lib/storm/external/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM</value>
+    <description>This parameter is used by the storm-deploy project to configure the jvm options for the nimbus daemon.</description>
+  </property>
+
+  <property>
+    <name>worker.childopts</name>
+    <value>-Xmx768m -javaagent:/usr/lib/storm/external/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/lib/storm/external/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM</value>
+    <description>The jvm opts provided to workers launched by this supervisor. All \"%ID%\" substrings are replaced with an identifier for this worker.</description>
+  </property>
+
+
+
+  <property>
+    <name>ui.childopts</name>
+    <value>-Xmx768m -Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf</value>
+    <description>Childopts for Storm UI Java process.</description>
+  </property>
+
+  <property>
+    <name>supervisor.childopts</name>
+    <value>-Xmx256m -Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=56431 -javaagent:/usr/lib/storm/external/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/lib/storm/external/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM</value>
+    <description>This parameter is used by the storm-deploy project to configure the jvm options for the supervisor daemon.</description>
+  </property>
+
+
+
+</configuration>
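
The %ID% substitution described for worker.childopts amounts to a plain string
replacement; a small sketch, assuming the worker's port serves as the
identifier (a common convention, but an assumption here):

    def expand_worker_childopts(template, worker_id):
        # Every %ID% substring is replaced with a per-worker identifier.
        return template.replace('%ID%', str(worker_id))

    template = '-Xmx768m -javaagent:...process=Worker_%ID%_JVM'
    print(expand_worker_childopts(template, 6700))
    # -Xmx768m -javaagent:...process=Worker_6700_JVM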

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
new file mode 100644
index 0000000..c25718d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>STORM</name>
+      <displayName>Storm</displayName>
+      <comment>Apache Hadoop Stream processing framework</comment>
+      <version>0.9.3.2.2.0.0</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
new file mode 100644
index 0000000..25f579a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>TEZ</name>
+      <displayName>Tez</displayName>
+      <comment>Tez is the next generation Hadoop Query Processing framework written on top of YARN.</comment>
+      <version>0.6.0.2.9.9.9</version>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>tez_2_9_9_9_98</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
new file mode 100644
index 0000000..d14be36
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<!-- The default settings for Templeton. -->
+<!-- Edit templeton-site.xml to change settings for your local -->
+<!-- install. -->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>templeton.hadoop.conf.dir</name>
+    <value>/usr/hdp/2.9.9.9-98/etc/hadoop/conf</value>
+    <description>The path to the Hadoop configuration.</description>
+  </property>
+
+  <property>
+    <name>templeton.jar</name>
+    <value>/usr/hdp/2.9.9.9-98/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <description>The path to the Templeton jar file.</description>
+  </property>
+
+  <property>
+    <name>templeton.libjars</name>
+    <value>/usr/hdp/2.9.9.9-98/zookeeper/zookeeper.jar</value>
+    <description>Jars to add to the classpath.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.hadoop</name>
+    <value>/usr/hdp/2.9.9.9-98/hadoop/bin/hadoop</value>
+    <description>The path to the Hadoop executable.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.hcat</name>
+    <value>/usr/hdp/2.9.9.9-98/hive/bin/hcat</value>
+    <description>The path to the hcatalog executable.</description>
+  </property>
+
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
new file mode 100644
index 0000000..a05f9e7
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>WEBHCAT</name>
+      <comment>This is a comment for the WEBHCAT service</comment>
+      <version>0.14.0.2.9.9.9</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>hive_2_9_9_9_98-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
new file mode 100644
index 0000000..a831936
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration supports_final="true" xmlns:xi="http://www.w3.org/2001/XInclude">
+
+  <property>
+    <name>mapreduce.admin.user.env</name>
+    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/hdp/2.9.9.9-98/hadoop/lib/native/Linux-amd64-64</value>
+    <description>
+      Additional execution environment entries for map and reduce task processes.
+      This is not an additive property. You must preserve the original value if
+      you want your map and reduce tasks to have access to native libraries (compression, etc.).
+    </description>
+  </property>
+
+
+</configuration>
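
Because mapreduce.admin.user.env is not additive, any override has to carry the
existing value forward. A minimal sketch of extending it safely (the extra
directory is hypothetical; the current value is the stack default above):

    def extend_user_env(current_value, extra_dir):
        # Append a directory to the LD_LIBRARY_PATH entry instead of
        # replacing the property, so the native-library paths survive.
        key, _, paths = current_value.partition('=')
        return '%s=%s:%s' % (key, paths, extra_dir)

    current = ('LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:'
               '/usr/hdp/2.9.9.9-98/hadoop/lib/native/Linux-amd64-64')
    print(extend_user_env(current, '/opt/custom/native'))  # hypothetical dir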

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
new file mode 100644
index 0000000..065f57e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration supports_final="true" xmlns:xi="http://www.w3.org/2001/XInclude">
+
+  <property>
+    <name>yarn.resourcemanager.nodes.exclude-path</name>
+    <value>/usr/hdp/2.9.9.9-98/etc/hadoop/conf/yarn.exclude</value>
+    <description>
+      Names a file that contains a list of hosts that are
+      not permitted to connect to the resource manager.  The full pathname of the
+      file must be specified.  If the value is empty, no hosts are
+      excluded.
+    </description>
+  </property>
+
+</configuration>
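
The exclude file named above is conventionally a plain list of hostnames, one
per line; an empty file excludes nothing. A short illustration (the hostnames
are hypothetical):

    exclude_path = '/usr/hdp/2.9.9.9-98/etc/hadoop/conf/yarn.exclude'
    decommissioned = ['c6402.ambari.apache.org', 'c6403.ambari.apache.org']

    # One excluded NodeManager host per line.
    contents = '\n'.join(decommissioned) + '\n'
    print('%s would contain:\n%s' % (exclude_path, contents))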

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
new file mode 100644
index 0000000..7a30894
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
@@ -0,0 +1,71 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>YARN</name>
+      <displayName>YARN</displayName>
+      <comment>Apache Hadoop NextGen MapReduce (YARN)</comment>
+      <version>2.6.0.2.9.9.9</version>
+      <components>
+        <component>
+          <name>APP_TIMELINE_SERVER</name>
+          <cardinality>1</cardinality>
+        </component>
+      </components>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>hadoop_2_9_9_9_98-yarn</name>
+            </package>
+            <package>
+              <name>hadoop_2_9_9_9_98-mapreduce</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+    </service>
+
+    <service>
+      <name>MAPREDUCE2</name>
+      <displayName>MapReduce2</displayName>
+      <comment>Apache Hadoop NextGen MapReduce (YARN)</comment>
+      <version>2.6.0.2.9.9.9</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>hadoop_2_9_9_9_98-mapreduce</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+      <configuration-dir>configuration-mapred</configuration-dir>
+
+    </service>
+
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
new file mode 100644
index 0000000..525faef
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>ZOOKEEPER</name>
+      <displayName>ZooKeeper</displayName>
+      <comment>Centralized service which provides highly reliable distributed coordination</comment>
+      <version>3.4.5.2.9.9.9</version>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>zookeeper_2_9_9_9_98</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
index 78cfde7..beed46a 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
@@ -50,18 +50,29 @@ class TestServiceCheck(RMFTestCase):
         tries = 20,
         conf_dir = '/etc/hadoop/conf',
         try_sleep = 3,
+        bin_dir = '/usr/bin',
         user = 'ambari-qa',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp ; hadoop fs -chmod 777 /tmp',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
         conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
         logoutput = True,
-        not_if = 'hadoop fs -test -e /tmp',
+        not_if = 'hadoop --config /etc/hadoop/conf fs -test -e /tmp',
         try_sleep = 3,
         tries = 5,
         user = 'ambari-qa',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop fs -put /etc/passwd /tmp/',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
+        conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
+        logoutput = True,
+        try_sleep = 3,
+        tries = 5,
+        user = 'ambari-qa',
+    )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
         logoutput = True,
+        bin_dir = '/usr/bin',
         tries = 5,
         conf_dir = '/etc/hadoop/conf',
         try_sleep = 3,
@@ -70,6 +81,7 @@ class TestServiceCheck(RMFTestCase):
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/',
         logoutput = True,
         tries = 5,
+        bin_dir = '/usr/bin',
         conf_dir = '/etc/hadoop/conf',
         try_sleep = 3,
         user = 'ambari-qa',
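
The new expectations above show ExecuteHadoop being passed an explicit bin_dir
alongside conf_dir. As a rough sketch of the intent (not the actual
resource_management implementation), the resource resolves to a command along
these lines:

    def build_hadoop_command(command, conf_dir, bin_dir):
        # Illustrative only: qualify the hadoop binary with bin_dir and pass
        # the configuration directory explicitly, matching the expected
        # 'hadoop --config /etc/hadoop/conf ...' strings in the test.
        return '%s/hadoop --config %s %s' % (bin_dir, conf_dir, command)

    print(build_hadoop_command('fs -mkdir /tmp', '/etc/hadoop/conf', '/usr/bin'))
    # /usr/bin/hadoop --config /etc/hadoop/conf fs -mkdir /tmp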

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index c820120..a2261fb 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -250,6 +250,7 @@ class TestHBaseMaster(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               owner = 'hbase',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
@@ -260,6 +261,7 @@ class TestHBaseMaster(RMFTestCase):
                               kinit_path_local = "/usr/bin/kinit",
                               mode = 0711,
                               owner = 'hbase',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -268,6 +270,7 @@ class TestHBaseMaster(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
 
@@ -350,6 +353,7 @@ class TestHBaseMaster(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               owner = 'hbase',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
@@ -360,6 +364,7 @@ class TestHBaseMaster(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0711,
                               owner = 'hbase',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -368,5 +373,6 @@ class TestHBaseMaster(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
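
These assertions follow the HdfsDirectory pattern: several directories are
queued with action=['create_delayed'] and then flushed by a final resource with
action=['create']. A simplified sketch of that batching idea (class and method
names here are illustrative, not the real resource_management API):

    class HdfsDirectoryBatch(object):
        # Directories accumulate until 'create' flushes them in one call.
        def __init__(self):
            self.pending = []

        def create_delayed(self, path, **attrs):
            self.pending.append((path, attrs))

        def create(self, conf_dir, bin_dir):
            dirs = ' '.join(path for path, _ in self.pending)
            self.pending = []
            return '%s/hadoop --config %s fs -mkdir -p %s' % (bin_dir, conf_dir, dirs)

    batch = HdfsDirectoryBatch()
    batch.create_delayed('/apps/hbase/data', owner='hbase')
    batch.create_delayed('/apps/hbase/staging', owner='hbase', mode=0o711)
    print(batch.create('/etc/hadoop/conf', '/usr/bin'))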

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index 6a97941..c705fbd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -179,6 +179,7 @@ class TestHbaseRegionServer(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               owner = 'hbase',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
@@ -189,6 +190,7 @@ class TestHbaseRegionServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0711,
                               owner = 'hbase',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -197,6 +199,7 @@ class TestHbaseRegionServer(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )
 
@@ -279,6 +282,7 @@ class TestHbaseRegionServer(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               owner = 'hbase',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
@@ -289,6 +293,7 @@ class TestHbaseRegionServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0711,
                               owner = 'hbase',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -297,5 +302,6 @@ class TestHbaseRegionServer(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
                               action = ['create'],
                               )

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
index 7f9bfa4..7dab7fc 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
@@ -39,13 +39,13 @@ class TestServiceCheck(RMFTestCase):
       content = Template('hbase-smoke.sh.j2'),
       mode = 0755,
     )
-    self.assertResourceCalled('Execute', ' hbase --config /etc/hbase/conf shell /tmp/hbase-smoke.sh',
+    self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf shell /tmp/hbase-smoke.sh',
       logoutput = True,
       tries = 3,
       user = 'ambari-qa',
       try_sleep = 5,
     )
-    self.assertResourceCalled('Execute', ' /tmp/hbaseSmokeVerify.sh /etc/hbase/conf ',
+    self.assertResourceCalled('Execute', ' /tmp/hbaseSmokeVerify.sh /etc/hbase/conf  /usr/lib/hbase/bin/hbase',
       logoutput = True,
       tries = 3,
       user = 'ambari-qa',
@@ -74,16 +74,16 @@ class TestServiceCheck(RMFTestCase):
       group = 'hadoop',
       mode = 0644,
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.headless.keytab hbase; hbase shell /tmp/hbase_grant_permissions.sh',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.headless.keytab hbase; /usr/lib/hbase/bin/hbase shell /tmp/hbase_grant_permissions.sh',
       user = 'hbase',
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; hbase --config /etc/hbase/conf shell /tmp/hbase-smoke.sh',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; /usr/lib/hbase/bin/hbase --config /etc/hbase/conf shell /tmp/hbase-smoke.sh',
       logoutput = True,
       tries = 3,
       user = 'ambari-qa',
       try_sleep = 5,
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; /tmp/hbaseSmokeVerify.sh /etc/hbase/conf ',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; /tmp/hbaseSmokeVerify.sh /etc/hbase/conf  /usr/lib/hbase/bin/hbase',
       logoutput = True,
       tries = 3,
       user = 'ambari-qa',

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 5e38f66..c7d2601 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -48,7 +48,7 @@ class TestNamenode(RMFTestCase):
                               content = StaticFile('checkForFormat.sh'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('Execute', '/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /var/lib/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
+    self.assertResourceCalled('Execute', '/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /usr/bin /var/run/hadoop/hdfs/namenode/formatted/ /var/lib/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                               not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/ || test -d /var/lib/hdfs/namenode/formatted/',
                               )
@@ -75,7 +75,7 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su - hdfs -c \'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode\'',
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                               )
-    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+    self.assertResourceCalled('Execute', "su - hdfs -c 'export PATH=$PATH:/usr/bin ; hadoop --config /etc/hadoop/conf dfsadmin -safemode get' | grep 'Safe mode is OFF'",
                               tries = 40,
                               only_if = None,
                               try_sleep = 10,
@@ -88,6 +88,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0777,
                               owner = 'hdfs',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
@@ -98,6 +99,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0770,
                               owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -107,6 +109,7 @@ class TestNamenode(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               action = ['create'],
+                              bin_dir = '/usr/bin',
                               only_if = None,
                               )
     self.assertNoMoreResources()
@@ -149,7 +152,7 @@ class TestNamenode(RMFTestCase):
                               content = StaticFile('checkForFormat.sh'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('Execute', '/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /var/lib/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
+    self.assertResourceCalled('Execute', '/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /usr/bin /var/run/hadoop/hdfs/namenode/formatted/ /var/lib/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                               not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/ || test -d /var/lib/hdfs/namenode/formatted/',
                               )
@@ -179,7 +182,7 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
                               user = 'hdfs',
                               )
-    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+    self.assertResourceCalled('Execute', "su - hdfs -c 'export PATH=$PATH:/usr/bin ; hadoop --config /etc/hadoop/conf dfsadmin -safemode get' | grep 'Safe mode is OFF'",
                               tries = 40,
                               only_if = None,
                               try_sleep = 10,
@@ -192,6 +195,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0777,
                               owner = 'hdfs',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
@@ -202,6 +206,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0770,
                               owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -211,6 +216,7 @@ class TestNamenode(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               action = ['create'],
+                              bin_dir = '/usr/bin',
                               only_if = None,
                               )
     self.assertNoMoreResources()
@@ -260,9 +266,9 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su - hdfs -c \'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode\'',
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                               )
-    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+    self.assertResourceCalled('Execute', "su - hdfs -c 'export PATH=$PATH:/usr/bin ; hadoop --config /etc/hadoop/conf dfsadmin -safemode get' | grep 'Safe mode is OFF'",
                               tries = 40,
-                              only_if = "su - hdfs -c 'hdfs haadmin -getServiceState nn1 | grep active > /dev/null'",
+                              only_if = "su - hdfs -c 'export PATH=$PATH:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active > /dev/null'",
                               try_sleep = 10,
                               )
     self.assertResourceCalled('HdfsDirectory', '/tmp',
@@ -273,6 +279,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0777,
                               owner = 'hdfs',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
@@ -283,6 +290,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0770,
                               owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -292,7 +300,8 @@ class TestNamenode(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               action = ['create'],
-                              only_if = "su - hdfs -c 'hdfs haadmin -getServiceState nn1 | grep active > /dev/null'",
+                              bin_dir = '/usr/bin',
+                              only_if = "su - hdfs -c 'export PATH=$PATH:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active > /dev/null'",
                               )
     self.assertNoMoreResources()
 
@@ -326,9 +335,9 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
                               user = 'hdfs',
                               )
-    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+    self.assertResourceCalled('Execute', "su - hdfs -c 'export PATH=$PATH:/usr/bin ; hadoop --config /etc/hadoop/conf dfsadmin -safemode get' | grep 'Safe mode is OFF'",
                               tries = 40,
-                              only_if = "su - hdfs -c 'hdfs haadmin -getServiceState nn1 | grep active > /dev/null'",
+                              only_if = "su - hdfs -c 'export PATH=$PATH:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active > /dev/null'",
                               try_sleep = 10,
                               )
     self.assertResourceCalled('HdfsDirectory', '/tmp',
@@ -339,6 +348,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0777,
                               owner = 'hdfs',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
@@ -349,6 +359,7 @@ class TestNamenode(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               mode = 0770,
                               owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
@@ -358,7 +369,8 @@ class TestNamenode(RMFTestCase):
                               hdfs_user = 'hdfs',
                               kinit_path_local = '/usr/bin/kinit',
                               action = ['create'],
-                              only_if = "su - hdfs -c 'hdfs haadmin -getServiceState nn1 | grep active > /dev/null'",
+                              bin_dir = '/usr/bin',
+                              only_if = "su - hdfs -c 'export PATH=$PATH:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active > /dev/null'",
                               )
     self.assertNoMoreResources()
 
@@ -377,6 +389,7 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -refreshNodes',
                               user = 'hdfs',
                               conf_dir = '/etc/hadoop/conf',
+                              bin_dir = '/usr/bin',
                               kinit_override = True)
     self.assertNoMoreResources()
 
@@ -394,7 +407,8 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('Execute', '', user = 'hdfs')
     self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes', 
                               user = 'hdfs', 
-                              conf_dir = '/etc/hadoop/conf', 
+                              conf_dir = '/etc/hadoop/conf',
+                              bin_dir = '/usr/bin',
                               kinit_override = True)
     self.assertNoMoreResources()    
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
index 78cfde7..57abab3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
@@ -50,20 +50,31 @@ class TestServiceCheck(RMFTestCase):
         tries = 20,
         conf_dir = '/etc/hadoop/conf',
         try_sleep = 3,
+        bin_dir = '/usr/bin',
         user = 'ambari-qa',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp ; hadoop fs -chmod 777 /tmp',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
         conf_dir = '/etc/hadoop/conf',
         logoutput = True,
-        not_if = 'hadoop fs -test -e /tmp',
+        not_if = 'hadoop --config /etc/hadoop/conf fs -test -e /tmp',
         try_sleep = 3,
         tries = 5,
+        bin_dir = '/usr/bin',
         user = 'ambari-qa',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop fs -put /etc/passwd /tmp/',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
+        conf_dir = '/etc/hadoop/conf',
+        logoutput = True,
+        try_sleep = 3,
+        tries = 5,
+        bin_dir = '/usr/bin',
+        user = 'ambari-qa',
+    )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
         logoutput = True,
         tries = 5,
         conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
         try_sleep = 3,
         user = 'ambari-qa',
     )
@@ -71,6 +82,7 @@ class TestServiceCheck(RMFTestCase):
         logoutput = True,
         tries = 5,
         conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
         try_sleep = 3,
         user = 'ambari-qa',
     )


[23/30] git commit: AMBARI-7285. Incorrect default configuration for Storm in Ambari 1.6.1. (akovalenko)

Posted by jo...@apache.org.
AMBARI-7285. Incorrect default configuration for Storm in Ambari 1.6.1. (akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3cf2ee4d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3cf2ee4d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3cf2ee4d

Branch: refs/heads/branch-alerts-dev
Commit: 3cf2ee4d483091519279cc9f0e9db5a7fce2eb71
Parents: 2a7fb78
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Fri Sep 12 20:01:23 2014 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Fri Sep 12 20:01:23 2014 +0300

----------------------------------------------------------------------
 ambari-web/app/controllers/wizard/step7_controller.js | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3cf2ee4d/ambari-web/app/controllers/wizard/step7_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js b/ambari-web/app/controllers/wizard/step7_controller.js
index 3ad55d3..45647bb 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -674,12 +674,16 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, {
    * @method resolveStormConfigs
    */
   resolveStormConfigs: function (configs) {
-    var dependentConfigs, gangliaServerHost;
+    var dependentConfigs, gangliaServerHost, gangliaHostId, hosts;
     dependentConfigs = ['nimbus.childopts', 'supervisor.childopts', 'worker.childopts'];
     // if Ganglia selected or installed, set ganglia host to configs
     if (this.get('installedServiceNames').contains('STORM') && this.get('installedServiceNames').contains('GANGLIA')) return;
     if (this.get('allSelectedServiceNames').contains('GANGLIA') || this.get('installedServiceNames').contains('GANGLIA')) {
-      gangliaServerHost = this.get('wizardController').getDBProperty('masterComponentHosts').findProperty('component', 'GANGLIA_SERVER').hostName;
+      hosts = this.get('wizardController').getDBProperty('hosts');
+      gangliaHostId = this.get('wizardController').getDBProperty('masterComponentHosts').findProperty('component', 'GANGLIA_SERVER').host_id;
+      for (var hostName in hosts) {
+        if (hosts[hostName].id == gangliaHostId) gangliaServerHost = hosts[hostName].name;
+      }
       dependentConfigs.forEach(function (configName) {
         var config = configs.findProperty('name', configName);
         var replaceStr = config.value.match(/.jar=host[^,]+/)[0];
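
The fix stops reading a hostName field that masterComponentHosts no longer
stores and instead resolves the Ganglia server's display name through the
wizard's hosts map by host_id. Rendered in Python purely for illustration
(the dict shape is an assumption modelled on the loop above):

    # Illustrative equivalent of the Ember lookup in resolveStormConfigs.
    def resolve_ganglia_server_host(hosts, ganglia_host_id):
        for host in hosts.values():
            if host["id"] == ganglia_host_id:
                return host["name"]
        return None

    hosts = {"host1": {"id": "1", "name": "c6401.ambari.apache.org"}}
    assert resolve_ganglia_server_host(hosts, "1") == "c6401.ambari.apache.org"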


[18/30] git commit: AMBARI-7280. Slider View: View should use existing parameters if set. (onechiporenko)

Posted by jo...@apache.org.
AMBARI-7280. Slider View: View should use existing parameters if set. (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2fbbfb37
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2fbbfb37
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2fbbfb37

Branch: refs/heads/branch-alerts-dev
Commit: 2fbbfb370155ef9adb69fc43362dd0ad1c1448a9
Parents: 941b56a
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Fri Sep 12 17:22:31 2014 +0300
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Fri Sep 12 17:22:31 2014 +0300

----------------------------------------------------------------------
 .../assets/data/resource/slider-properties.json |  64 ++++
 .../app/controllers/slider_apps_controller.js   | 187 ------------
 .../ui/app/controllers/slider_controller.js     | 294 +++++++++++++++++++
 .../src/main/resources/ui/app/helpers/ajax.js   |  44 +--
 .../src/main/resources/ui/app/initialize.js     |  16 +-
 .../ui/app/mappers/application_status.js        |  52 +++-
 6 files changed, 434 insertions(+), 223 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2fbbfb37/contrib/views/slider/src/main/resources/ui/app/assets/data/resource/slider-properties.json
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/assets/data/resource/slider-properties.json b/contrib/views/slider/src/main/resources/ui/app/assets/data/resource/slider-properties.json
new file mode 100644
index 0000000..53a3331
--- /dev/null
+++ b/contrib/views/slider/src/main/resources/ui/app/assets/data/resource/slider-properties.json
@@ -0,0 +1,64 @@
+{
+  "href" : "http://host:8080/api/v1/views/SLIDER/versions/1.0.0/instances/MySliderName",
+  "ViewInstanceInfo" : {
+    "context_path" : "/views/SLIDER/1.0.0/MySliderName",
+    "description" : "MySliderDescription",
+    "icon64_path" : null,
+    "icon_path" : null,
+    "instance_name" : "MySliderName",
+    "label" : "MySliderDisplay",
+    "static" : false,
+    "version" : "1.0.0",
+    "view_name" : "SLIDER",
+    "visible" : true,
+    "instance_data" : { },
+    "properties" : {
+      "hdfs.address" : "hdfs://slider-1.c.pramod-thangali.internal:8020",
+      "yarn.resourcemanager.address" : "slider-2.c.pramod-thangali.internal:8050",
+      "yarn.resourcemanager.scheduler.address" : "slider-2.c.pramod-thangali.internal:8030",
+      "zookeeper.quorum" : "slider-1.c.pramod-thangali.internal:2181,slider-2.c.pramod-thangali.internal:2181,slider-3.c.pramod-thangali.internal:2181"
+    }
+  },
+  "resources" : [
+    {
+      "href" : "http://host:8080/api/v1/views/SLIDER/versions/1.0.0/instances/MySliderName/resources/status",
+      "instance_name" : "MySliderName",
+      "name" : "status",
+      "version" : "1.0.0",
+      "view_name" : "SLIDER"
+    }
+  ],
+  "apps" : [
+    {
+      "href" : "http://host:8080/api/v1/views/SLIDER/versions/1.0.0/instances/MySliderName/apps/1410332504508_1",
+      "id" : "1410332504508_1",
+      "instance_name" : "MySliderName",
+      "version" : "1.0.0",
+      "view_name" : "SLIDER"
+    },
+    {
+      "href" : "http://host:8080/api/v1/views/SLIDER/versions/1.0.0/instances/MySliderName/apps/1410332504508_2",
+      "id" : "1410332504508_2",
+      "instance_name" : "MySliderName",
+      "version" : "1.0.0",
+      "view_name" : "SLIDER"
+    }
+  ],
+  "privileges" : [ ],
+  "apptypes" : [
+    {
+      "href" : "http://host:8080/api/v1/views/SLIDER/versions/1.0.0/instances/MySliderName/apptypes/HBASE",
+      "id" : "HBASE",
+      "instance_name" : "MySliderName",
+      "version" : "1.0.0",
+      "view_name" : "SLIDER"
+    },
+    {
+      "href" : "http://host:8080/api/v1/views/SLIDER/versions/1.0.0/instances/MySliderName/apptypes/STORM",
+      "id" : "STORM",
+      "instance_name" : "MySliderName",
+      "version" : "1.0.0",
+      "view_name" : "SLIDER"
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fbbfb37/contrib/views/slider/src/main/resources/ui/app/controllers/slider_apps_controller.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/controllers/slider_apps_controller.js b/contrib/views/slider/src/main/resources/ui/app/controllers/slider_apps_controller.js
index 2f422f1..1bca7e4 100644
--- a/contrib/views/slider/src/main/resources/ui/app/controllers/slider_apps_controller.js
+++ b/contrib/views/slider/src/main/resources/ui/app/controllers/slider_apps_controller.js
@@ -18,191 +18,4 @@
 
 App.SliderAppsController = Ember.ArrayController.extend({
 
-  /**
-   *  Load resources on controller initialization
-   * @method initResources
-   */
-  initResources:function () {
-    this.getClusterName();
-  },
-
-  initialValuesToLoad: Em.Object.create({
-    ambariAddress: null,
-    clusterName: null,
-    hdfsAddress: null,
-    yarnRMAddress: null,
-    yarnRMSchedulerAddress: null,
-    zookeeperQuorum: null
-  }),
-
-  zookeeperHosts: [],
-
-  /**
-   * Get cluster name from server
-   * @returns {$.ajax}
-   * @method getClusterName
-   */
-  getClusterName: function() {
-    return App.ajax.send({
-      name: 'cluster_name',
-      sender: this,
-      data: {
-        urlPrefix: '/api/v1/'
-      },
-      success: 'getClusterNameSuccessCallback'
-    });
-  },
-
-  /**
-   * Success callback for clusterName-request
-   * @param {object} data
-   * @method getClusterNameSuccessCallback
-   */
-  getClusterNameSuccessCallback: function(data) {
-    var clusterName = Em.get(data.items[0], 'Clusters.cluster_name');
-    App.set('clusterName', clusterName);
-    App.ApplicationStatusMapper.loop('load');
-    this.loadConfigsTags();
-    this.loadComponentHost({componentName:"GANGLIA_SERVER",callback:"loadGangliaHostSuccessCallback"});
-    this.loadComponentHost({componentName:"NAGIOS_SERVER",callback:"loadNagiosHostSuccessCallback"});
-    this.loadComponentHost({componentName:"ZOOKEEPER_SERVER",callback:"setZookeeperQuorum"});
-  },
-
-  loadConfigsTags: function () {
-    App.ajax.send({
-      name: 'config.tags',
-      sender: this,
-      data: {
-        urlPrefix: '/api/v1/'
-      },
-      success: 'onLoadConfigsTags'
-    });
-  },
-
-  onLoadConfigsTags: function (data) {
-    var urlParams = [];
-    if(data.Clusters.desired_configs['yarn-site'] && data.Clusters.desired_configs['zookeeper-env']){
-      var coreSiteTag = data.Clusters.desired_configs['core-site'].tag;
-      var yarnSiteTag = data.Clusters.desired_configs['yarn-site'].tag;
-      var zookeeperTag = data.Clusters.desired_configs['zookeeper-env'].tag;
-      urlParams.push('(type=core-site&tag=' + coreSiteTag + ')');
-      urlParams.push('(type=yarn-site&tag=' + yarnSiteTag + ')');
-      urlParams.push('(type=zookeeper-env&tag=' + zookeeperTag + ')');
-
-      App.ajax.send({
-        name: 'get_all_configurations',
-        sender: this,
-        data: {
-          urlParams: urlParams.join('|'),
-          urlPrefix: '/api/v1/'
-        },
-        success: 'onLoadConfigs'
-      });
-    }
-  },
-
-  onLoadConfigs: function (data) {
-    var hdfs = data.items.findProperty('type', 'core-site'),
-    yarn = data.items.findProperty('type', 'yarn-site'),
-    zookeeper = data.items.findProperty('type', 'zookeeper-env'),
-    initialValuesToLoad = this.get('initialValuesToLoad');
-    initialValuesToLoad.set('ambariAddress', location.protocol+"//"+document.location.host);
-    initialValuesToLoad.set('clusterName', App.get('clusterName'));
-    initialValuesToLoad.set('hdfsAddress', hdfs.properties['fs.defaultFS']);
-    initialValuesToLoad.set('yarnRMAddress', yarn.properties['yarn.resourcemanager.address']);
-    initialValuesToLoad.set('yarnRMSchedulerAddress', yarn.properties['yarn.resourcemanager.scheduler.address']);
-    initialValuesToLoad.set('zookeeperQuorum', zookeeper.properties.clientPort);
-    this.setZookeeperQuorum();
-  },
-
-  setZookeeperQuorum: function (data){
-    var zookeeperHosts = this.get('zookeeperHosts'),
-    hosts = [],
-    initialValuesToLoad = this.get('initialValuesToLoad');
-
-    //done
-    if(initialValuesToLoad.zookeeperQuorum !== null){
-      if(data){
-        hosts = data.items.map(function(item) {
-          return item.Hosts.host_name + ":" + initialValuesToLoad.zookeeperQuorum;
-        });
-        initialValuesToLoad.set('zookeeperQuorum', hosts.join(','));
-        this.sendInitialValues();
-      }else if(zookeeperHosts.length > 0){
-        hosts = zookeeperHosts.map(function(host) {
-          return host + ":" + initialValuesToLoad.zookeeperQuorum;
-        });
-        initialValuesToLoad.set('zookeeperQuorum', hosts.join(','));
-        this.sendInitialValues();
-      }
-    }else{
-      this.set('zookeeperHosts', data.items.mapProperty('Hosts.host_name'));
-    }
-  },
-
-  /**
-   * Send request to server to save initialValues
-   * @return {$.ajax}
-   * @method sendInitialValues
-   */
-  sendInitialValues: function () {
-    var initialValues = this.get('initialValuesToLoad');
-    return App.ajax.send({
-      name: 'saveInitialValues',
-      sender: this,
-      data: {
-        data:  {
-          ViewInstanceInfo: {
-            properties: {
-              'hdfs.address': initialValues.get('hdfsAddress'),
-              'yarn.resourcemanager.address': initialValues.get('yarnRMAddress'),
-              'yarn.resourcemanager.scheduler.address': initialValues.get('yarnRMSchedulerAddress'),
-              'zookeeper.quorum': initialValues.get('zookeeperQuorum')
-            }
-          }
-        }
-      }
-    });
-  },
-
-  /**
-   * Load ganglia server host
-   * @method loadGangliaHost
-   */
-  loadComponentHost: function (params) {
-    return App.ajax.send({
-      name: 'components_hosts',
-      sender: this,
-      data: {
-        componentName: params.componentName,
-        urlPrefix: '/api/v1/'
-      },
-      success: params.callback
-    });
-
-  },
-
-  /**
-   * Success callback for hosts-request
-   * Save host name to gangliaHost
-   * @param {Object} data
-   * @method loadGangliaHostSuccessCallback
-   */
-  loadGangliaHostSuccessCallback: function (data) {
-    if(data.items[0]){
-      App.set('gangliaHost', Em.get(data.items[0], 'Hosts.host_name'));
-    }
-  },
-
-  /**
-   * Success callback for hosts-request
-   * Save host name to nagiosHost
-   * @param {Object} data
-   * @method loadGangliaHostSuccessCallback
-   */
-  loadNagiosHostSuccessCallback: function (data) {
-    if(data.items[0]){
-      App.set('nagiosHost', Em.get(data.items[0], 'Hosts.host_name'));
-    }
-  }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fbbfb37/contrib/views/slider/src/main/resources/ui/app/controllers/slider_controller.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/controllers/slider_controller.js b/contrib/views/slider/src/main/resources/ui/app/controllers/slider_controller.js
new file mode 100644
index 0000000..8283de1
--- /dev/null
+++ b/contrib/views/slider/src/main/resources/ui/app/controllers/slider_controller.js
@@ -0,0 +1,294 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Checks Slider-properties.
+ * If they are not available, uses Ambari-configs to populate them:
+ *  - Load cluster name
+ *  - Load hostName for GANGLIA_SERVER
+ *  - Load hostName for NAGIOS_SERVER
+ *  - Load hostNames for ZOOKEEPER_SERVER
+ *  - Load config tags
+ *  - Load configs
+ *  - Save Slider-properties
+ * If Slider-properties exists:
+ *  - Load cluster name
+ *  - Load hostNames
+ * @type {Ember.Controller}
+ */
+App.SliderController = Ember.Controller.extend({
+
+  /**
+   *  Load resources on controller initialization
+   * @method initResources
+   */
+  initResources: function () {
+    this.getParametersFromViewProperties();
+  },
+
+  /**
+   * List of Slider-properties mapped from Ambari-configs
+   * @type {Em.Object}
+   */
+  initialValuesToLoad: Em.Object.create({
+    ambariAddress: null,
+    clusterName: null,
+    hdfsAddress: null,
+    yarnRMAddress: null,
+    yarnRMSchedulerAddress: null,
+    zookeeperQuorum: null
+  }),
+
+  /**
+   * List of host names with ZOOKEEPER_SERVER installed
+   * @type {string[]}
+   */
+  zookeeperHosts: [],
+
+  /**
+   * Get Slider properties from View-parameters (set in the Ambari Admin View)
+   * If parameters can't be found, use Ambari-configs to populate Slider properties
+   * @returns {$.ajax}
+   * @method getParametersFromViewProperties
+   */
+  getParametersFromViewProperties: function() {
+    return App.ajax.send({
+      name: 'slider.getViewParams',
+      sender: this,
+      success: 'getParametersFromViewPropertiesSuccessCallback',
+      error: 'getClusterName'
+    });
+  },
+
+  /**
+   * Check if Slider-properties exist
+   * If not - get Ambari configs to populate Slider properties
+   * @param {object} data
+   * @method getParametersFromViewPropertiesSuccessCallback
+   */
+  getParametersFromViewPropertiesSuccessCallback: function(data) {
+    var properties = Em.get(data, 'ViewInstanceInfo.properties'),
+      loadConfigs = Em.isNone(properties);
+      this.getClusterName(loadConfigs);
+  },
+
+  /**
+   * Get cluster name from server
+   * @returns {$.ajax}
+   * @method getClusterName
+   */
+  getClusterName: function (loadConfigs) {
+    if (Em.isNone(loadConfigs)) loadConfigs = true;
+    return App.ajax.send({
+      name: 'cluster_name',
+      sender: this,
+      data: {
+        urlPrefix: '/api/v1/',
+        loadConfigs: loadConfigs
+      },
+      success: 'getClusterNameSuccessCallback'
+    });
+  },
+
+  /**
+   * Success callback for clusterName-request
+   * @param {object} data
+   * @param {object} opt
+   * @param {object} params
+   * @method getClusterNameSuccessCallback
+   */
+  getClusterNameSuccessCallback: function (data, opt, params) {
+    var clusterName = Em.get(data.items[0], 'Clusters.cluster_name');
+    App.set('clusterName', clusterName);
+    App.ApplicationStatusMapper.loop('load');
+    this.loadComponentHost({componentName: "GANGLIA_SERVER", callback: "loadGangliaHostSuccessCallback"});
+    this.loadComponentHost({componentName: "NAGIOS_SERVER", callback: "loadNagiosHostSuccessCallback"});
+    this.loadComponentHost({componentName: "ZOOKEEPER_SERVER", callback: "setZookeeperQuorum"});
+    if(params.loadConfigs) {
+      this.loadConfigsTags();
+    }
+  },
+
+  /**
+   * Load config tags from server
+   * @returns {$.ajax}
+   * @method loadConfigsTags
+   */
+  loadConfigsTags: function () {
+    return App.ajax.send({
+      name: 'config.tags',
+      sender: this,
+      data: {
+        urlPrefix: '/api/v1/'
+      },
+      success: 'onLoadConfigsTags'
+    });
+  },
+
+  /**
+   * Success callback for <code>loadConfigsTags</code>
+   * Get configs for selected tags
+   * @param {object} data
+   * @method onLoadConfigsTags
+   */
+  onLoadConfigsTags: function (data) {
+    var urlParams = [];
+    if (data.Clusters.desired_configs['yarn-site'] && data.Clusters.desired_configs['zookeeper-env']) {
+      var coreSiteTag = data.Clusters.desired_configs['core-site'].tag;
+      var yarnSiteTag = data.Clusters.desired_configs['yarn-site'].tag;
+      var zookeeperTag = data.Clusters.desired_configs['zookeeper-env'].tag;
+      urlParams.push('(type=core-site&tag=' + coreSiteTag + ')');
+      urlParams.push('(type=yarn-site&tag=' + yarnSiteTag + ')');
+      urlParams.push('(type=zookeeper-env&tag=' + zookeeperTag + ')');
+
+      App.ajax.send({
+        name: 'get_all_configurations',
+        sender: this,
+        data: {
+          urlParams: urlParams.join('|'),
+          urlPrefix: '/api/v1/'
+        },
+        success: 'onLoadConfigs'
+      });
+    }
+  },
+
+  /**
+   * Success callback for <code>onLoadConfigs</code>
+   * Set properties for <code>initialValuesToLoad</code> using loaded configs
+   * @param {object} data
+   * @method onLoadConfigs
+   */
+  onLoadConfigs: function (data) {
+    var hdfs = data.items.findProperty('type', 'core-site'),
+      yarn = data.items.findProperty('type', 'yarn-site'),
+      zookeeper = data.items.findProperty('type', 'zookeeper-env'),
+      initialValuesToLoad = this.get('initialValuesToLoad');
+    initialValuesToLoad.set('ambariAddress', location.protocol + "//" + document.location.host);
+    initialValuesToLoad.set('clusterName', App.get('clusterName'));
+    initialValuesToLoad.set('hdfsAddress', hdfs.properties['fs.defaultFS']);
+    initialValuesToLoad.set('yarnRMAddress', yarn.properties['yarn.resourcemanager.address']);
+    initialValuesToLoad.set('yarnRMSchedulerAddress', yarn.properties['yarn.resourcemanager.scheduler.address']);
+    initialValuesToLoad.set('zookeeperQuorum', zookeeper.properties.clientPort);
+    this.setZookeeperQuorum();
+  },
+
+  /**
+   * Set value for <code>initialValuesToLoad.zookeeperQuorum</code>
+   * Also do request to save Slider-properties
+   * @param {object} data
+   * @method setZookeeperQuorum
+   */
+  setZookeeperQuorum: function (data) {
+    var zookeeperHosts = this.get('zookeeperHosts'),
+      hosts = [],
+      initialValuesToLoad = this.get('initialValuesToLoad');
+
+    //done
+    if (!Em.isNone(initialValuesToLoad.zookeeperQuorum)) {
+      if (data) {
+        hosts = data.items.map(function (item) {
+          return item.Hosts.host_name + ":" + initialValuesToLoad.zookeeperQuorum;
+        });
+        initialValuesToLoad.set('zookeeperQuorum', hosts.join(','));
+        this.sendInitialValues();
+      }
+      else {
+        if (zookeeperHosts.length > 0) {
+          hosts = zookeeperHosts.map(function (host) {
+            return host + ":" + initialValuesToLoad.zookeeperQuorum;
+          });
+          initialValuesToLoad.set('zookeeperQuorum', hosts.join(','));
+          this.sendInitialValues();
+        }
+      }
+    }
+    else {
+      this.set('zookeeperHosts', data.items.mapProperty('Hosts.host_name'));
+    }
+  },
+
+  /**
+   * Send request to server to save initialValues
+   * @return {$.ajax}
+   * @method sendInitialValues
+   */
+  sendInitialValues: function () {
+    var initialValues = this.get('initialValuesToLoad');
+    return App.ajax.send({
+      name: 'saveInitialValues',
+      sender: this,
+      data: {
+        data: {
+          ViewInstanceInfo: {
+            properties: {
+              'hdfs.address': initialValues.get('hdfsAddress'),
+              'yarn.resourcemanager.address': initialValues.get('yarnRMAddress'),
+              'yarn.resourcemanager.scheduler.address': initialValues.get('yarnRMSchedulerAddress'),
+              'zookeeper.quorum': initialValues.get('zookeeperQuorum')
+            }
+          }
+        }
+      }
+    });
+  },
+
+  /**
+   * Load host for component
+   * @param {{componentName: string, callback: string}} params
+   * @return {$.ajax}
+   * @method loadComponentHost
+   */
+  loadComponentHost: function (params) {
+    return App.ajax.send({
+      name: 'components_hosts',
+      sender: this,
+      data: {
+        componentName: params.componentName,
+        urlPrefix: '/api/v1/'
+      },
+      success: params.callback
+    });
+
+  },
+
+  /**
+   * Success callback for hosts-request
+   * Save host name to GANGLIA_SERVER (set in <code>App.gangliaHost</code>)
+   * @param {Object} data
+   * @method loadGangliaHostSuccessCallback
+   */
+  loadGangliaHostSuccessCallback: function (data) {
+    if (data.items[0]) {
+      App.set('gangliaHost', Em.get(data.items[0], 'Hosts.host_name'));
+    }
+  },
+
+  /**
+   * Success callback for hosts-request
+   * Save host name to NAGIOS_SERVER (set in <code>App.nagiosHost</code>)
+   * @param {Object} data
+   * @method loadNagiosHostSuccessCallback
+   */
+  loadNagiosHostSuccessCallback: function (data) {
+    if (data.items[0]) {
+      App.set('nagiosHost', Em.get(data.items[0], 'Hosts.host_name'));
+    }
+  }
+
+});
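
The short-circuit at the top of this controller is the heart of the fix:
saved view parameters win, and Ambari configs are consulted only when none
exist. The decision itself, as a pure function in Python (illustrative; the
dict shape models the ViewInstanceInfo payload above):

    # Mirrors Em.isNone(properties) in
    # getParametersFromViewPropertiesSuccessCallback.
    def should_load_ambari_configs(view_instance_info):
        return view_instance_info.get("properties") is None

    assert should_load_ambari_configs({}) is True
    assert should_load_ambari_configs(
        {"properties": {"hdfs.address": "hdfs://nn:8020"}}) is False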

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fbbfb37/contrib/views/slider/src/main/resources/ui/app/helpers/ajax.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/helpers/ajax.js b/contrib/views/slider/src/main/resources/ui/app/helpers/ajax.js
index 6d7e4a1..3da0624 100644
--- a/contrib/views/slider/src/main/resources/ui/app/helpers/ajax.js
+++ b/contrib/views/slider/src/main/resources/ui/app/helpers/ajax.js
@@ -30,11 +30,16 @@
  */
 var urls = {
 
+  'slider.getViewParams': {
+    real: '',
+    mock: '/data/resource/slider-properties.json'
+  },
+
   'mapper.applicationTypes': {
     real: 'apptypes?fields=*',
     mock: '/data/apptypes/all_fields.json',
     headers: {
-      Accept : "text/plain; charset=utf-8",
+      Accept: "text/plain; charset=utf-8",
       "Content-Type": "text/plain; charset=utf-8"
     }
   },
@@ -43,7 +48,7 @@ var urls = {
     real: 'apps/?fields=*',
     mock: '/data/apps/apps.json',
     headers: {
-      Accept : "text/plain; charset=utf-8",
+      Accept: "text/plain; charset=utf-8",
       "Content-Type": "text/plain; charset=utf-8"
     }
   },
@@ -58,7 +63,7 @@ var urls = {
     headers: {
       "Content-Type": "text/plain; charset=utf-8"
     },
-    format: function(data) {
+    format: function (data) {
       return {
         type: 'PUT',
         data: JSON.stringify(data.data)
@@ -72,7 +77,7 @@ var urls = {
     headers: {
       "Content-Type": "text/plain; charset=utf-8"
     },
-    format: function(data) {
+    format: function (data) {
       return {
         type: 'POST',
         data: JSON.stringify(data.data)
@@ -83,7 +88,7 @@ var urls = {
   'destroyApp': {
     real: 'apps/{id}',
     mock: '',
-    format: function() {
+    format: function () {
       return {
         method: 'DELETE'
       }
@@ -96,7 +101,7 @@ var urls = {
     headers: {
       "Content-Type": "text/plain; charset=utf-8"
     },
-    format: function(data) {
+    format: function (data) {
       return {
         method: 'PUT',
         data: JSON.stringify(data.data)
@@ -109,7 +114,7 @@ var urls = {
     headers: {
       "Content-Type": "text/plain; charset=utf-8"
     },
-    format: function(data) {
+    format: function (data) {
       return {
         method: 'PUT',
         data: JSON.stringify(data.data)
@@ -119,18 +124,18 @@ var urls = {
 
   'service_status': {
     real: 'clusters/{clusterName}/services?fields=ServiceInfo/state&minimal_response=true',
-    mock:'/data/resource/service_status.json',
+    mock: '/data/resource/service_status.json',
     headers: {
-      Accept : "text/plain; charset=utf-8",
+      Accept: "text/plain; charset=utf-8",
       "Content-Type": "text/plain; charset=utf-8"
     }
   },
 
   'components_hosts': {
     real: 'clusters/{clusterName}/hosts?host_components/HostRoles/component_name={componentName}&minimal_response=true',
-    mock:'/data/resource/components_hosts.json',
+    mock: '/data/resource/components_hosts.json',
     headers: {
-      Accept : "text/plain; charset=utf-8",
+      Accept: "text/plain; charset=utf-8",
       "Content-Type": "text/plain; charset=utf-8"
     }
   },
@@ -139,7 +144,7 @@ var urls = {
     real: 'clusters/{clusterName}/configurations/service_config_versions?service_name={serviceName}&is_current=true',
     mock: '/data/resource/service_configs.json',
     headers: {
-      Accept : "text/plain; charset=utf-8",
+      Accept: "text/plain; charset=utf-8",
       "Content-Type": "text/plain; charset=utf-8"
     }
   },
@@ -147,7 +152,7 @@ var urls = {
   'config.tags': {
     'real': 'clusters/{clusterName}?fields=Clusters/desired_configs',
     headers: {
-      Accept : "text/plain; charset=utf-8",
+      Accept: "text/plain; charset=utf-8",
       "Content-Type": "text/plain; charset=utf-8"
     }
   },
@@ -155,16 +160,16 @@ var urls = {
   'get_all_configurations': {
     'real': 'clusters/{clusterName}/configurations?{urlParams}',
     headers: {
-      Accept : "text/plain; charset=utf-8",
+      Accept: "text/plain; charset=utf-8",
       "Content-Type": "text/plain; charset=utf-8"
     }
   },
 
   'cluster_name': {
     real: 'clusters',
-    mock:'/data/resource/cluster_name.json',
+    mock: '/data/resource/cluster_name.json',
     headers: {
-      Accept : "text/plain; charset=utf-8",
+      Accept: "text/plain; charset=utf-8",
       "Content-Type": "text/plain; charset=utf-8"
     }
   },
@@ -234,10 +239,11 @@ var formatRequest = function (data) {
   }
   else {
     var prefix = App.get('urlPrefix');
-    if(Em.get(data, 'urlPrefix')){
-      var prefix = Em.get(data, 'urlPrefix');
+    if (Em.get(data, 'urlPrefix')) {
+      prefix = Em.get(data, 'urlPrefix');
     }
-    opt.url = prefix + (formatUrl(this.real, data) ? formatUrl(this.real, data) : "");
+    var url = formatUrl(this.real, data);
+    opt.url = prefix + (url ? url : '');
   }
 
   if (this.format) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fbbfb37/contrib/views/slider/src/main/resources/ui/app/initialize.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/initialize.js b/contrib/views/slider/src/main/resources/ui/app/initialize.js
index 4fc797a..c9b53d7 100755
--- a/contrib/views/slider/src/main/resources/ui/app/initialize.js
+++ b/contrib/views/slider/src/main/resources/ui/app/initialize.js
@@ -90,10 +90,22 @@ App.initializer({
        * List of errors
        * @type {string[]}
        */
-      viewErrors: []
+      viewErrors: [],
+
+      /**
+       * Host with Nagios Server
+       * @type {string|null}
+       */
+      nagiosHost: null,
+
+      /**
+       * Host with Ganglia Server
+       * @type {string|null}
+       */
+      gangliaHost: null
 
     });
-    application.SliderAppsController.proto().initResources();
+    application.SliderController.proto().initResources();
     application.ApplicationTypeMapper.loop('load');
     application.SliderAppsMapper.loop('load');
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fbbfb37/contrib/views/slider/src/main/resources/ui/app/mappers/application_status.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/mappers/application_status.js b/contrib/views/slider/src/main/resources/ui/app/mappers/application_status.js
index f3d3968..21e4c55 100644
--- a/contrib/views/slider/src/main/resources/ui/app/mappers/application_status.js
+++ b/contrib/views/slider/src/main/resources/ui/app/mappers/application_status.js
@@ -38,7 +38,7 @@ App.ApplicationStatusMapper = App.Mapper.createWithMixins(App.RunPeriodically, {
    * @method load
    * @return {$.ajax}
    */
-  load: function() {
+  load: function () {
     return App.ajax.send({
       name: 'mapper.applicationStatus',
       sender: this,
@@ -51,13 +51,18 @@ App.ApplicationStatusMapper = App.Mapper.createWithMixins(App.RunPeriodically, {
    * @param {object} data received from server data
    * @method setResourcesVersion
    */
-  setResourcesVersion: function(data) {
-    App.set('resourcesVersion', Em.get(data, "version") ? Em.get(data, "version") : "version" );
-    if(App.get('clusterName')){
+  setResourcesVersion: function (data) {
+    App.set('resourcesVersion', Em.getWithDefault(data, "version", 'version'));
+    if (App.get('clusterName')) {
       this.loadServicesStatus();
     }
   },
 
+  /**
+   * Get Services status from Ambari-server
+   * @returns {$.ajax}
+   * @method loadServicesStatus
+   */
   loadServicesStatus: function () {
     return App.ajax.send({
       name: 'service_status',
@@ -69,25 +74,42 @@ App.ApplicationStatusMapper = App.Mapper.createWithMixins(App.RunPeriodically, {
     });
   },
 
+  /**
+   * Success-callback for load services status
+   * Set Slider state basing on <code>ServiceInfo.state</code>
+   * @param {object} data
+   * @method setErrors
+   */
   setErrors: function (data) {
     var self = this,
-    errors = [];
-    this.get('servicesWeNeed').forEach( function (serviceName) {
-      self.findError(data.items.findProperty("ServiceInfo.service_name", serviceName), errors);
+      errors = [];
+    this.get('servicesWeNeed').forEach(function (serviceName) {
+      var e = self.findError(data.items.findProperty("ServiceInfo.service_name", serviceName));
+      if (!Em.isNone(e)) {
+        errors.push(e);
+      }
     });
 
-    App.set('viewEnabled', (errors.length > 0 ? false : true));
+    App.set('viewEnabled', errors.length === 0);
     App.set('viewErrors', errors);
   },
 
-  findError: function (data, errors){
-    var name = Em.get(data, "ServiceInfo.service_name")
-    if(data){
-      if(Em.get(data, "ServiceInfo.state") != "STARTED")
-        errors.push(Em.I18n.t('error.start'+name));
-    }else{
-      errors.push(Em.I18n.t('error.no'+name));
+  /**
+   * Get error for service (missed or not started)
+   * @param {object} data
+   * @returns {string|null}
+   */
+  findError: function (data) {
+    var name = Em.get(data, "ServiceInfo.service_name"),
+      error = null;
+    if (data) {
+      if (Em.get(data, "ServiceInfo.state") != "STARTED")
+        error = Em.I18n.t('error.start' + name);
+    }
+    else {
+      error = Em.I18n.t('error.no' + name);
     }
+    return error;
   }
 
 });
\ No newline at end of file
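
The mapper refactor swaps an out-parameter (the old findError pushed into a
shared errors array) for a plain return value that setErrors filters. The
same shape in Python, illustrative only (note the missing-service check comes
first here, so no name lookup happens on an absent service):

    def find_error(name, service):
        if service is None:
            return "error.no" + name       # service missing from the cluster
        if service.get("state") != "STARTED":
            return "error.start" + name    # present but not started
        return None

    services = {"HDFS": {"state": "STARTED"}, "YARN": {"state": "INSTALLED"}}
    needed = ["HDFS", "YARN", "ZOOKEEPER"]
    errors = [e for e in (find_error(n, services.get(n)) for n in needed) if e]
    # errors == ['error.startYARN', 'error.noZOOKEEPER']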


[29/30] git commit: AMBARI-7221 - Ambari Server REST API Memory Leak (jonathanhurley)

Posted by jo...@apache.org.
AMBARI-7221 - Ambari Server REST API Memory Leak (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/33557337
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/33557337
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/33557337

Branch: refs/heads/branch-alerts-dev
Commit: 33557337bb7bc917fa08509587031e077652803d
Parents: 4209a49
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Sep 9 21:30:46 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Sep 12 19:47:24 2014 -0400

----------------------------------------------------------------------
 ambari-server/conf/unix/ambari.properties       |  3 ++
 .../server/configuration/Configuration.java     | 44 ++++++++++++++------
 .../ambari/server/controller/AmbariServer.java  | 22 +++++++---
 3 files changed, 52 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/33557337/ambari-server/conf/unix/ambari.properties
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/ambari.properties b/ambari-server/conf/unix/ambari.properties
index 41cada7..f1bb88c 100644
--- a/ambari-server/conf/unix/ambari.properties
+++ b/ambari-server/conf/unix/ambari.properties
@@ -53,3 +53,6 @@ agent.threadpool.size.max=25
 
 # linux open-file limit
 ulimit.open.files=10000
+
+# Server HTTP settings
+server.http.session.inactive_timeout=1800

http://git-wip-us.apache.org/repos/asf/ambari/blob/33557337/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 78fd7b6..9bdbc31 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -261,12 +261,11 @@ public class Configuration {
   private static final String RESOURCES_DIR_DEFAULT =
       "/var/lib/ambari-server/resources/";
   private static final String ANONYMOUS_AUDIT_NAME_KEY = "anonymous.audit.name";
-  private static final String CLIENT_SECURITY_DEFAULT = "local";
+
   private static final int CLIENT_API_PORT_DEFAULT = 8080;
   private static final int CLIENT_API_SSL_PORT_DEFAULT = 8443;
-  private static final String USER_ROLE_NAME_DEFAULT = "user";
-  private static final String ADMIN_ROLE_NAME_DEFAULT = "admin";
   private static final String LDAP_BIND_ANONYMOUSLY_DEFAULT = "true";
+
   //TODO For embedded server only - should be removed later
   private static final String LDAP_PRIMARY_URL_DEFAULT = "localhost:33389";
   private static final String LDAP_BASE_DN_DEFAULT = "dc=ambari,dc=apache,dc=org";
@@ -310,6 +309,8 @@ public class Configuration {
   private static final String VIEW_EXTRACTION_THREADPOOL_TIMEOUT_KEY = "view.extraction.threadpool.timeout";
   private static final long VIEW_EXTRACTION_THREADPOOL_TIMEOUT_DEFAULT = 100000L;
 
+  private static final String SERVER_HTTP_SESSION_INACTIVE_TIMEOUT = "server.http.session.inactive_timeout";
+
   private static final Logger LOG = LoggerFactory.getLogger(
       Configuration.class);
   private Properties properties;
@@ -404,7 +405,7 @@ public class Configuration {
     }
     configsMap.put(SRVR_CRT_PASS_KEY, password);
 
-    if (this.getApiSSLAuthentication()) {
+    if (getApiSSLAuthentication()) {
       LOG.info("API SSL Authentication is turned on.");
       File httpsPassFile = new File(configsMap.get(CLIENT_API_SSL_KSTR_DIR_NAME_KEY)
         + File.separator + configsMap.get(CLIENT_API_SSL_CRT_PASS_FILE_NAME_KEY));
@@ -467,14 +468,14 @@ public class Configuration {
   private synchronized void loadCredentialProvider() {
     if (!credentialProviderInitialized) {
       try {
-        this.credentialProvider = new CredentialProvider(null,
+        credentialProvider = new CredentialProvider(null,
           getMasterKeyLocation(), isMasterKeyPersisted());
       } catch (Exception e) {
         LOG.info("Credential provider creation failed. Reason: " + e.getMessage());
         if (LOG.isDebugEnabled()) {
           e.printStackTrace();
         }
-        this.credentialProvider = null;
+        credentialProvider = null;
       }
       credentialProviderInitialized = true;
     }
@@ -490,8 +491,9 @@ public class Configuration {
     //Get property file stream from classpath
     InputStream inputStream = Configuration.class.getClassLoader().getResourceAsStream(CONFIG_FILE);
 
-    if (inputStream == null)
+    if (inputStream == null) {
       throw new RuntimeException(CONFIG_FILE + " not found in classpath");
+    }
 
     // load the properties
     try {
@@ -534,8 +536,9 @@ public class Configuration {
   public String getBootSetupAgentPassword() {
     String pass = configsMap.get(PASSPHRASE_KEY);
 
-    if (null != pass)
+    if (null != pass) {
       return pass;
+    }
 
     // fallback
     return properties.getProperty(BOOTSTRAP_SETUP_AGENT_PASSWORD, "password");
@@ -688,8 +691,9 @@ public class Configuration {
 
   public String getLocalDatabaseUrl() {
     String dbName = properties.getProperty(SERVER_DB_NAME_KEY);
-    if(dbName == null || dbName.isEmpty())
+    if(dbName == null || dbName.isEmpty()) {
       throw new RuntimeException("Server DB Name is not configured!");
+    }
 
     return JDBC_LOCAL_URL + dbName;
   }
@@ -705,10 +709,11 @@ public class Configuration {
       dbpasswd = readPasswordFromStore(passwdProp);
     }
 
-    if (dbpasswd != null)
+    if (dbpasswd != null) {
       return dbpasswd;
-    else
+    } else {
       return readPasswordFromFile(passwdProp, SERVER_JDBC_USER_PASSWD_DEFAULT);
+    }
   }
 
   public String getRcaDatabaseDriver() {
@@ -727,8 +732,9 @@ public class Configuration {
     String passwdProp = properties.getProperty(SERVER_JDBC_RCA_USER_PASSWD_KEY);
     if (passwdProp != null) {
       String dbpasswd = readPasswordFromStore(passwdProp);
-      if (dbpasswd != null)
+      if (dbpasswd != null) {
         return dbpasswd;
+      }
     }
     return readPasswordFromFile(passwdProp, SERVER_JDBC_RCA_USER_PASSWD_DEFAULT);
   }
@@ -1084,4 +1090,18 @@ public class Configuration {
     return Long.parseLong(properties.getProperty(
         VIEW_EXTRACTION_THREADPOOL_TIMEOUT_KEY, String.valueOf(VIEW_EXTRACTION_THREADPOOL_TIMEOUT_DEFAULT)));
   }
+
+  /**
+   * Gets the inactivity timeout value, in seconds, for sessions created in
+   * Jetty by Spring Security. Without this timeout value, each request to the
+   * REST APIs will create new sessions that are never reaped since their
+   * default time is -1.
+   *
+   * @return the time value, or {@code 1800} seconds by default.
+   */
+  public int getHttpSessionInactiveTimeout() {
+    return Integer.parseInt(properties.getProperty(
+        SERVER_HTTP_SESSION_INACTIVE_TIMEOUT,
+        "1800"));
+  }
 }
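
A toy model makes the getter's purpose concrete: with a max-inactive interval
of -1 (Jetty's default), every cookie-less request creates a session that no
reaper ever collects. The model below is a deliberate simplification, not
Ambari or Jetty code:

    import time

    class SessionStore(object):
        # Toy stand-in for a session manager; illustration only.
        def __init__(self, max_inactive):
            self.max_inactive = max_inactive  # seconds; -1 means never expire
            self.sessions = {}                # session id -> last-access time
            self._next_id = 0

        def handle_request(self, session_id=None):
            if session_id is None:            # cookie-less request: new session
                session_id = self._next_id
                self._next_id += 1
            self.sessions[session_id] = time.time()
            return session_id

        def reap(self):
            if self.max_inactive < 0:
                return                        # old behaviour: the leak
            cutoff = time.time() - self.max_inactive
            self.sessions = dict((s, t) for s, t in self.sessions.items()
                                 if t >= cutoff)

    leaky = SessionStore(-1)
    for _ in range(1000):
        leaky.handle_request()                # 1000 REST calls, no cookie
    leaky.reap()
    assert len(leaky.sessions) == 1000        # nothing was collected

With server.http.session.inactive_timeout=1800, the same thousand idle
sessions are gone after half an hour.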

http://git-wip-us.apache.org/repos/asf/ambari/blob/33557337/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index fc74e00..e109f7e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -230,8 +230,18 @@ public class AmbariServer {
       root.setErrorHandler(injector.getInstance(AmbariErrorHandler.class));
       root.getSessionHandler().setSessionManager(sessionManager);
 
-      //Changing session cookie name to avoid conflicts
-      root.getSessionHandler().getSessionManager().setSessionCookie("AMBARISESSIONID");
+      SessionManager jettySessionManager = root.getSessionHandler().getSessionManager();
+
+      // use AMBARISESSIONID instead of JSESSIONID to avoid conflicts with
+      // other services (like HDFS) that run on the same context but a different
+      // port
+      jettySessionManager.setSessionCookie("AMBARISESSIONID");
+
+      // each request that does not use AMBARISESSIONID will create a new
+      // HashedSession in Jetty; these MUST be reaped after inactivity in order
+      // to prevent a memory leak
+      int sessionInactivityTimeout = configs.getHttpSessionInactiveTimeout();
+      jettySessionManager.setMaxInactiveInterval(sessionInactivityTimeout);
 
       GenericWebApplicationContext springWebAppContext = new GenericWebApplicationContext();
       springWebAppContext.setServletContext(root.getServletContext());
@@ -246,8 +256,10 @@ public class AmbariServer {
 
       certMan.initRootCert();
 
-      ServletContextHandler agentroot = new ServletContextHandler(serverForAgent,
-          "/", ServletContextHandler.SESSIONS );
+      // the agent communication (heartbeats, registration, etc) is stateless
+      // and does not use sessions.
+      ServletContextHandler agentroot = new ServletContextHandler(
+          serverForAgent, "/", ServletContextHandler.NO_SESSIONS);
 
       ServletHolder rootServlet = root.addServlet(DefaultServlet.class, "/");
       rootServlet.setInitParameter("dirAllowed", "false");
@@ -262,8 +274,8 @@ public class AmbariServer {
       root.addFilter(new FilterHolder(injector.getInstance(AmbariPersistFilter.class)), "/proxy/*", 1);
       root.addFilter(new FilterHolder(new MethodOverrideFilter()), "/api/*", 1);
       root.addFilter(new FilterHolder(new MethodOverrideFilter()), "/proxy/*", 1);
-      agentroot.addFilter(new FilterHolder(injector.getInstance(AmbariPersistFilter.class)), "/agent/*", 1);
 
+      agentroot.addFilter(new FilterHolder(injector.getInstance(AmbariPersistFilter.class)), "/agent/*", 1);
       agentroot.addFilter(SecurityFilter.class, "/*", 1);
 
       if (configs.getApiAuthentication()) {


[04/30] git commit: AMBARI-7257 Use Versioned RPMS for HDP 2.2 stack and make it pluggable to be able to reuse the scripts for HDP 2.* (dsen)

Posted by jo...@apache.org.
AMBARI-7257 Use Versioned RPMS for HDP 2.2 stack and make it pluggable to be able to reuse the scripts for HDP 2.* (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7d9feb6a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7d9feb6a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7d9feb6a

Branch: refs/heads/branch-alerts-dev
Commit: 7d9feb6afa6cd310fb8b11221ab67f01f048dd38
Parents: 8778556
Author: Dmytro Sen <ds...@hortonworks.com>
Authored: Thu Sep 11 22:26:27 2014 +0300
Committer: Dmytro Sen <ds...@hortonworks.com>
Committed: Thu Sep 11 22:26:27 2014 +0300

----------------------------------------------------------------------
 .../libraries/providers/execute_hadoop.py       |  9 +-
 .../libraries/providers/hdfs_directory.py       | 16 +++-
 .../libraries/resources/execute_hadoop.py       |  1 +
 .../libraries/resources/hdfs_directory.py       |  1 +
 .../2.0.6/hooks/after-INSTALL/scripts/params.py | 20 +++--
 .../hooks/before-INSTALL/scripts/params.py      |  3 +-
 .../hooks/before-START/files/checkForFormat.sh  |  3 +
 .../2.0.6/hooks/before-START/scripts/params.py  | 25 ++++--
 .../services/FLUME/package/scripts/flume.py     |  2 +-
 .../FLUME/package/scripts/flume_check.py        |  2 +-
 .../services/FLUME/package/scripts/params.py    | 14 +++-
 .../HBASE/package/files/hbaseSmokeVerify.sh     |  3 +-
 .../services/HBASE/package/scripts/params.py    | 37 ++++++--
 .../HBASE/package/scripts/service_check.py      |  6 +-
 .../HDFS/package/files/checkForFormat.sh        |  4 +-
 .../HDFS/package/scripts/hdfs_namenode.py       | 15 ++--
 .../services/HDFS/package/scripts/namenode.py   |  2 +-
 .../services/HDFS/package/scripts/params.py     | 34 +++++---
 .../HDFS/package/scripts/service_check.py       | 27 ++++--
 .../2.0.6/services/HIVE/package/scripts/hcat.py |  6 ++
 .../HIVE/package/scripts/hcat_service_check.py  |  8 +-
 .../2.0.6/services/HIVE/package/scripts/hive.py |  2 +
 .../HIVE/package/scripts/hive_service.py        |  9 +-
 .../HIVE/package/scripts/install_jars.py        |  6 +-
 .../services/HIVE/package/scripts/params.py     | 73 ++++++++++------
 .../package/templates/startHiveserver2.sh.j2    |  2 +-
 .../services/OOZIE/configuration/oozie-env.xml  |  2 +-
 .../services/OOZIE/package/files/oozieSmoke2.sh |  8 +-
 .../OOZIE/package/scripts/oozie_service.py      |  4 +-
 .../services/OOZIE/package/scripts/params.py    | 24 ++++--
 .../services/PIG/package/scripts/params.py      | 20 ++++-
 .../PIG/package/scripts/service_check.py        | 10 ++-
 .../services/SQOOP/package/scripts/params.py    | 10 ++-
 .../WEBHCAT/configuration/webhcat-env.xml       |  2 +-
 .../services/WEBHCAT/package/scripts/params.py  | 41 ++++++---
 .../services/WEBHCAT/package/scripts/webhcat.py | 11 ++-
 .../services/YARN/package/scripts/params.py     | 45 ++++++----
 .../YARN/package/scripts/resourcemanager.py     |  5 +-
 .../services/YARN/package/scripts/service.py    |  2 +-
 .../YARN/package/scripts/service_check.py       |  3 +-
 .../2.0.6/services/YARN/package/scripts/yarn.py | 14 ++--
 .../ZOOKEEPER/package/scripts/params.py         | 17 +++-
 .../services/FALCON/package/scripts/params.py   | 15 +++-
 .../services/STORM/package/scripts/params.py    |  5 +-
 .../main/resources/stacks/HDP/2.2/metainfo.xml  | 23 +++++
 .../resources/stacks/HDP/2.2/repos/repoinfo.xml | 82 ++++++++++++++++++
 .../stacks/HDP/2.2/role_command_order.json      | 88 ++++++++++++++++++++
 .../stacks/HDP/2.2/services/FALCON/metainfo.xml | 28 +++++++
 .../stacks/HDP/2.2/services/FLUME/metainfo.xml  | 40 +++++++++
 .../stacks/HDP/2.2/services/HBASE/metainfo.xml  | 42 ++++++++++
 .../services/HDFS/configuration/hadoop-env.xml  | 29 +++++++
 .../services/HDFS/configuration/hdfs-site.xml   | 34 ++++++++
 .../stacks/HDP/2.2/services/HDFS/metainfo.xml   | 68 +++++++++++++++
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   | 44 ++++++++++
 .../services/OOZIE/configuration/oozie-site.xml | 38 +++++++++
 .../stacks/HDP/2.2/services/OOZIE/metainfo.xml  | 28 +++++++
 .../stacks/HDP/2.2/services/PIG/metainfo.xml    | 41 +++++++++
 .../stacks/HDP/2.2/services/SQOOP/metainfo.xml  | 29 +++++++
 .../services/STORM/configuration/storm-env.xml  | 29 +++++++
 .../services/STORM/configuration/storm-site.xml | 54 ++++++++++++
 .../stacks/HDP/2.2/services/STORM/metainfo.xml  | 29 +++++++
 .../stacks/HDP/2.2/services/TEZ/metainfo.xml    | 40 +++++++++
 .../WEBHCAT/configuration/webhcat-site.xml      | 59 +++++++++++++
 .../HDP/2.2/services/WEBHCAT/metainfo.xml       | 44 ++++++++++
 .../YARN/configuration-mapred/mapred-site.xml   | 36 ++++++++
 .../services/YARN/configuration/yarn-site.xml   | 35 ++++++++
 .../stacks/HDP/2.2/services/YARN/metainfo.xml   | 71 ++++++++++++++++
 .../HDP/2.2/services/ZOOKEEPER/metainfo.xml     | 40 +++++++++
 .../stacks/1.3.2/HDFS/test_service_check.py     | 18 +++-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |  6 ++
 .../2.0.6/HBASE/test_hbase_regionserver.py      |  6 ++
 .../2.0.6/HBASE/test_hbase_service_check.py     | 10 +--
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 36 +++++---
 .../stacks/2.0.6/HDFS/test_service_check.py     | 18 +++-
 .../stacks/2.0.6/HIVE/test_hcat_client.py       | 10 ++-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |  8 ++
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 18 ++++
 .../2.0.6/HIVE/test_hive_service_check.py       |  7 ++
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 10 ++-
 .../stacks/2.0.6/PIG/test_pig_service_check.py  | 12 ++-
 .../stacks/2.0.6/WEBHCAT/test_webhcat_server.py | 12 +++
 .../stacks/2.0.6/YARN/test_historyserver.py     | 12 +++
 .../stacks/2.0.6/YARN/test_nodemanager.py       | 12 +++
 .../2.0.6/YARN/test_yarn_service_check.py       |  9 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |  2 +
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  7 ++
 86 files changed, 1636 insertions(+), 196 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
index 8ab71ff..f367e99 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
@@ -19,6 +19,7 @@ limitations under the License.
 Ambari Agent
 
 """
+import os
 
 from resource_management import *
 
@@ -27,6 +28,7 @@ class ExecuteHadoopProvider(Provider):
     kinit__path_local = self.resource.kinit_path_local
     keytab = self.resource.keytab
     conf_dir = self.resource.conf_dir
+    bin_dir = self.resource.bin_dir
     command = self.resource.command
     principal = self.resource.principal
     
@@ -39,10 +41,15 @@ class ExecuteHadoopProvider(Provider):
           path = ['/bin'],
           user = self.resource.user
         )
-    
+
+      path = os.environ['PATH']
+      if bin_dir is not None:
+        path += os.pathsep + bin_dir
+
       Execute (format("hadoop --config {conf_dir} {command}"),
         user        = self.resource.user,
         tries       = self.resource.tries,
         try_sleep   = self.resource.try_sleep,
         logoutput   = self.resource.logoutput,
+        environment = {'PATH' : path}
       )
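
With this change a caller can point the resource at a versioned install and
the hadoop wrapper resolves through the extended PATH. A hedged usage sketch
(the versioned bin path is an assumption; the tests in this patch pass the
stock '/usr/bin'):

    from resource_management import *

    ExecuteHadoop('dfsadmin -refreshNodes',
                  user='hdfs',
                  conf_dir='/etc/hadoop/conf',
                  bin_dir='/usr/hdp/2.2.0.0/hadoop/bin',  # assumed path
                  kinit_override=True)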

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
index 6a40b6d..33cc1be 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
@@ -19,6 +19,7 @@ limitations under the License.
 Ambari Agent
 
 """
+import os
 
 from resource_management import *
 directories_list = [] #directories list for mkdir
@@ -68,6 +69,7 @@ class HdfsDirectoryProvider(Provider):
     secured = self.resource.security_enabled
     keytab_file = self.resource.keytab
     kinit_path = self.resource.kinit_path_local
+    bin_dir = self.resource.bin_dir
 
     chmod_commands = []
     chown_commands = []
@@ -76,7 +78,7 @@ class HdfsDirectoryProvider(Provider):
       mode = chmod_key[0]
       recursive = chmod_key[1]
       chmod_dirs_str = ' '.join(chmod_dirs)
-      chmod_commands.append(format("hadoop fs -chmod {recursive} {mode} {chmod_dirs_str}"))
+      chmod_commands.append(format("hadoop --config {hdp_conf_dir} fs -chmod {recursive} {mode} {chmod_dirs_str}"))
 
     for chown_key, chown_dirs in chown_map.items():
       owner = chown_key[0]
@@ -87,7 +89,7 @@ class HdfsDirectoryProvider(Provider):
         chown = owner
         if group:
           chown = format("{owner}:{group}")
-        chown_commands.append(format("hadoop fs -chown {recursive} {chown} {chown_dirs_str}"))
+        chown_commands.append(format("hadoop --config {hdp_conf_dir} fs -chown {recursive} {chown} {chown_dirs_str}"))
 
     if secured:
         Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
@@ -97,11 +99,17 @@ class HdfsDirectoryProvider(Provider):
     #for hadoop 2 we need to specify -p to create directories recursively
     parent_flag = '`rpm -q hadoop | grep -q "hadoop-1" || echo "-p"`'
 
-    Execute(format('hadoop fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
+    path = os.environ['PATH']
+    if bin_dir is not None:
+      path += os.pathsep + bin_dir
+
+    Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
                    chmod_cmd=' && '.join(chmod_commands),
                    chown_cmd=' && '.join(chown_commands)),
             user=hdp_hdfs_user,
-            not_if=format("su - {hdp_hdfs_user} -c 'hadoop fs -ls {dir_list_str}'")
+            environment = {'PATH' : path},
+            not_if=format("su - {hdp_hdfs_user} -c 'export PATH=$PATH:{bin_dir} ; "
+                          "hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}'")
     )
 
     directories_list[:] = []
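
The companion HdfsDirectory resource gains the same bin_dir plumbing. A
typical (hedged) call sequence, with create_delayed batching directories so
the provider issues a single 'fs -mkdir' plus the chmod/chown commands; all
values here are illustrative:

    from resource_management import *

    HdfsDirectory('/user/ambari-qa',
                  action='create_delayed',
                  owner='ambari-qa',
                  mode=0770,                 # octal, Python 2 style as in this codebase
                  hdfs_user='hdfs',
                  conf_dir='/etc/hadoop/conf',
                  bin_dir='/usr/bin')
    HdfsDirectory(None, action='create')     # flush the pending list in one shot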

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
index 94daf5b..149548d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
@@ -32,6 +32,7 @@ class ExecuteHadoop(Resource):
   user = ResourceArgument()
   logoutput = BooleanArgument(default=False)
   principal = ResourceArgument(default=lambda obj: obj.user)
+  bin_dir = ResourceArgument() # appended to $PATH
   
   conf_dir = ResourceArgument()
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
index 63d9cc2..7888cd8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
@@ -38,6 +38,7 @@ class HdfsDirectory(Resource):
   keytab = ResourceArgument()
   kinit_path_local = ResourceArgument()
   hdfs_user = ResourceArgument()
+  bin_dir = ResourceArgument(default="")
 
   #action 'create' immediately creates all pending directory in efficient manner
   #action 'create_delayed' add directory to list of pending directories

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index d537199..389d6ab 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -19,17 +19,29 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.core.system import System
-import os
 
 config = Script.get_config()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_conf_empty_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf.empty")
+  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
+  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 #java params
 java_home = config['hostLevelParams']['java_home']
 #hadoop params
-hadoop_conf_dir = "/etc/hadoop/conf"
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
@@ -56,8 +68,6 @@ ttnode_heapsize = "1024m"
 
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
 
 #users and groups

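This rpm_version branch recurs, with small variations, in every params.py the patch touches. The shape of the logic condenses to a few lines; the helper below is illustrative only (the real scripts inline the branch in each file):

def stack_path(rpm_version, versioned, legacy):
  # /usr/hdp/<version>/... when RPM-versioned installs are active,
  # the classic /etc and /usr/lib locations otherwise
  return versioned % rpm_version if rpm_version is not None else legacy

hadoop_conf_dir    = stack_path(rpm_version, "/usr/hdp/%s/etc/hadoop/conf", "/etc/hadoop/conf")
hadoop_libexec_dir = stack_path(rpm_version, "/usr/hdp/%s/hadoop/libexec", "/usr/lib/hadoop/libexec")
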
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
index 01789a7..5700e28 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
@@ -19,7 +19,6 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.core.system import System
-import os
 import json
 import collections
 
@@ -38,6 +37,8 @@ user_group = config['configurations']['cluster-env']['user_group']
 proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
 nagios_group = config['configurations']['nagios-env']['nagios_group']
 
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+
 #hosts
 hostname = config["hostname"]
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh
index f92f613..9036ab2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh
@@ -24,6 +24,8 @@ export hdfs_user=$1
 shift
 export conf_dir=$1
 shift
+export bin_dir=$1
+shift
 export mark_dir=$1
 shift
 export name_dirs=$*
@@ -50,6 +52,7 @@ if [[ ! -d $mark_dir ]] ; then
   done
 
   if [[ $EXIT_CODE == 0 ]] ; then
+    export PATH=$PATH:$bin_dir
     su - ${hdfs_user} -c "yes Y | hadoop --config ${conf_dir} ${command}"
   else
     echo "ERROR: Namenode directory(s) is non empty. Will not format the namenode. List of non-empty namenode dirs ${list_of_non_empty_dirs}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index fc525a6..8fb2d90 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -23,6 +23,25 @@ import os
 
 config = Script.get_config()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
+  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+  hadoop_lib_home = format("/usr/hdp/{rpm_version}/hadoop/lib")
+  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
+  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_lib_home = "/usr/lib/hadoop/lib"
+  hadoop_bin = "/usr/lib/hadoop/sbin"
+  hadoop_home = '/usr'
+
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
@@ -72,11 +91,7 @@ if has_ganglia_server:
 
 if has_namenode:
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
-hadoop_lib_home = "/usr/lib/hadoop/lib"
-hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
-hadoop_home = "/usr"
-hadoop_bin = "/usr/lib/hadoop/sbin"
 
 task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
 
@@ -127,8 +142,6 @@ ttnode_heapsize = "1024m"
 
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
 
 #log4j.properties

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
index 6109d3e..1404d27 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
@@ -63,7 +63,7 @@ def flume(action = None):
       _set_desired_state('STARTED')
       
     flume_base = format('su -s /bin/bash {flume_user} -c "export JAVA_HOME={java_home}; '
-      '/usr/bin/flume-ng agent --name {{0}} --conf {{1}} --conf-file {{2}} {{3}}"')
+      '{flume_bin} agent --name {{0}} --conf {{1}} --conf-file {{2}} {{3}}"')
 
     for agent in cmd_target_names():
       flume_agent_conf_dir = params.flume_conf_dir + os.sep + agent

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py
index 3036e20..b93b8e8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py
@@ -31,7 +31,7 @@ class FlumeServiceCheck(Script):
       Execute(format("{kinit_path_local} -kt {http_keytab} {principal_replaced}"),
               user=params.smoke_user)
 
-    Execute(format('env JAVA_HOME={java_home} /usr/bin/flume-ng version'),
+    Execute(format('env JAVA_HOME={java_home} {flume_bin} version'),
             logoutput=True,
             tries = 3,
             try_sleep = 20)

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
index 128eed4..c1f8804 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
@@ -26,9 +26,19 @@ proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 
 security_enabled = False
 
-java_home = config['hostLevelParams']['java_home']
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  flume_conf_dir = format('/usr/hdp/{rpm_version}/etc/flume/conf')
+  flume_bin = format('/usr/hdp/{rpm_version}/flume/bin/flume-ng')
 
-flume_conf_dir = '/etc/flume/conf'
+else:
+  flume_conf_dir = '/etc/flume/conf'
+  flume_bin = '/usr/bin/flume-ng'
+
+java_home = config['hostLevelParams']['java_home']
 flume_log_dir = '/var/log/flume'
 flume_run_dir = '/var/run/flume'
 flume_user = 'flume'

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh
index eedffd3..5c320c0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh
@@ -21,7 +21,8 @@
 #
 conf_dir=$1
 data=$2
-echo "scan 'ambarismoketest'" | hbase --config $conf_dir shell > /tmp/hbase_chk_verify
+hbase_cmd=$3
+echo "scan 'ambarismoketest'" | $hbase_cmd --config $conf_dir shell > /tmp/hbase_chk_verify
 cat /tmp/hbase_chk_verify
 echo "Looking for $data"
 grep -q $data /tmp/hbase_chk_verify

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
index 364649c..d07ebd1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
@@ -26,11 +26,27 @@ import status_params
 config = Script.get_config()
 exec_tmp_dir = Script.get_tmp_dir()
 
-hbase_conf_dir = "/etc/hbase/conf"
-daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
-region_mover = "/usr/lib/hbase/bin/region_mover.rb"
-region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
-hbase_cmd = "/usr/lib/hbase/bin/hbase"
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hbase_conf_dir = format('/usr/hdp/{rpm_version}/etc/hbase/conf')
+  daemon_script = format('/usr/hdp/{rpm_version}/hbase/bin/hbase-daemon.sh')
+  region_mover = format('/usr/hdp/{rpm_version}/hbase/bin/region_mover.rb')
+  region_drainer = format('/usr/hdp/{rpm_version}/hbase/bin/draining_servers.rb')
+  hbase_cmd = format('/usr/hdp/{rpm_version}/hbase/bin/hbase')
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_bin_dir = "/usr/bin"
+  hbase_conf_dir = "/etc/hbase/conf"
+  daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
+  region_mover = "/usr/lib/hbase/bin/region_mover.rb"
+  region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
+  hbase_cmd = "/usr/lib/hbase/bin/hbase"
+
 hbase_excluded_hosts = config['commandParams']['excluded_hosts']
 hbase_drain_only = config['commandParams']['mark_draining_only']
 hbase_included_hosts = config['commandParams']['included_hosts']
@@ -72,7 +93,7 @@ if 'slave_hosts' in config['clusterHostInfo']:
   rs_hosts = default('/clusterHostInfo/hbase_rs_hosts', '/clusterHostInfo/slave_hosts') #if hbase_rs_hosts not given it is assumed that region servers on same nodes as slaves
 else:
   rs_hosts = default('/clusterHostInfo/hbase_rs_hosts', '/clusterHostInfo/all_hosts') 
-  
+
 smoke_test_user = config['configurations']['cluster-env']['smokeuser']
 smokeuser_permissions = "RWXCA"
 service_check_data = functions.get_unique_id_and_date()
@@ -105,7 +126,6 @@ hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']
 hbase_staging_dir = "/apps/hbase/staging"
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -119,5 +139,6 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )

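The functools.partial at the end pre-binds the arguments shared by every HdfsDirectory call in this service, so call sites name only the directory and its action while bin_dir rides along automatically. A self-contained illustration of the mechanism:

import functools

def hdfs_directory(path, action, conf_dir, hdfs_user, bin_dir):
  # stand-in for the real HdfsDirectory resource
  print("%s %s (conf=%s, user=%s, bin=%s)" % (path, action, conf_dir, hdfs_user, bin_dir))

HdfsDirectory = functools.partial(hdfs_directory,
                                  conf_dir='/etc/hadoop/conf',
                                  hdfs_user='hdfs',
                                  bin_dir='/usr/bin')

# equivalent to calling hdfs_directory with all five arguments spelled out
HdfsDirectory('/apps/hbase/staging', action='create')
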
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py
index 8fb38f7..15a306b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py
@@ -44,7 +44,7 @@ class HbaseServiceCheck(Script):
     
     if params.security_enabled:    
       hbase_grant_premissions_file = format("{exec_tmp_dir}/hbase_grant_permissions.sh")
-      grantprivelegecmd = format("{kinit_cmd} hbase shell {hbase_grant_premissions_file}")
+      grantprivelegecmd = format("{kinit_cmd} {hbase_cmd} shell {hbase_grant_premissions_file}")
   
       File( hbase_grant_premissions_file,
         owner   = params.hbase_user,
@@ -57,8 +57,8 @@ class HbaseServiceCheck(Script):
         user = params.hbase_user,
       )
 
-    servicecheckcmd = format("{smokeuser_kinit_cmd} hbase --config {hbase_conf_dir} shell {hbase_servicecheck_file}")
-    smokeverifycmd = format("{smokeuser_kinit_cmd} {exec_tmp_dir}/hbaseSmokeVerify.sh {hbase_conf_dir} {service_check_data}")
+    servicecheckcmd = format("{smokeuser_kinit_cmd} {hbase_cmd} --config {hbase_conf_dir} shell {hbase_servicecheck_file}")
+    smokeverifycmd = format("{smokeuser_kinit_cmd} {exec_tmp_dir}/hbaseSmokeVerify.sh {hbase_conf_dir} {service_check_data} {hbase_cmd}")
   
     Execute( servicecheckcmd,
       tries     = 3,

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
index d22d901..c9a3828 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
@@ -24,6 +24,8 @@ export hdfs_user=$1
 shift
 export conf_dir=$1
 shift
+export bin_dir=$1
+shift
 export old_mark_dir=$1
 shift
 export mark_dir=$1
@@ -56,7 +58,7 @@ if [[ ! -d $mark_dir ]] ; then
   done
 
   if [[ $EXIT_CODE == 0 ]] ; then
-    su - ${hdfs_user} -c "yes Y | hadoop --config ${conf_dir} ${command}"
+    su - ${hdfs_user} -c "export PATH=$PATH:${bin_dir} ; yes Y | hadoop --config ${conf_dir} ${command}"
   else
     echo "ERROR: Namenode directory(s) is non empty. Will not format the namenode. List of non-empty namenode dirs ${list_of_non_empty_dirs}"
   fi

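Splicing bin_dir in as the third positional argument shifts old_mark_dir and everything after it by one, so the script and its caller must change in lock-step; the matching caller update is in hdfs_namenode.py below. Spelled out, the caller-side contract is now:

# positional contract of checkForFormat.sh after this patch:
#   $1 hdfs_user  $2 conf_dir  $3 bin_dir  $4 old_mark_dir  $5 mark_dir  $6.. name_dirs
Execute(format("{tmp_dir}/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} "
               "{hadoop_bin_dir} {old_mark_dir} {mark_dir} {dfs_name_dir}"),
        not_if=format("test -d {old_mark_dir} || test -d {mark_dir}"))
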
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
index c4b48c6..68cf4fd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
@@ -45,11 +45,11 @@ def namenode(action=None, do_format=True):
       create_log_dir=True
     )
     if params.dfs_ha_enabled:
-      dfs_check_nn_status_cmd = format("su - {hdfs_user} -c 'hdfs haadmin -getServiceState {namenode_id} | grep active > /dev/null'")
+      dfs_check_nn_status_cmd = format("su - {hdfs_user} -c 'export PATH=$PATH:{hadoop_bin_dir} ; hdfs --config {hadoop_conf_dir} haadmin -getServiceState {namenode_id} | grep active > /dev/null'")
     else:
       dfs_check_nn_status_cmd = None
 
-    namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
+    namenode_safe_mode_off = format("su - {hdfs_user} -c 'export PATH=$PATH:{hadoop_bin_dir} ; hadoop --config {hadoop_conf_dir} dfsadmin -safemode get' | grep 'Safe mode is OFF'")
 
     if params.security_enabled:
       Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
@@ -110,14 +110,16 @@ def format_namenode(force=None):
   if not params.dfs_ha_enabled:
     if force:
       ExecuteHadoop('namenode -format',
-                    kinit_override=True)
+                    kinit_override=True,
+                    bin_dir=params.hadoop_bin_dir,
+                    conf_dir=params.hadoop_conf_dir)
     else:
       File(format("{tmp_dir}/checkForFormat.sh"),
            content=StaticFile("checkForFormat.sh"),
            mode=0755)
       Execute(format(
-        "{tmp_dir}/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} {old_mark_dir} "
-        "{mark_dir} {dfs_name_dir}"),
+        "{tmp_dir}/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} "
+        "{hadoop_bin_dir} {old_mark_dir} {mark_dir} {dfs_name_dir}"),
               not_if=format("test -d {old_mark_dir} || test -d {mark_dir}"),
               path="/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"
       )
@@ -154,4 +156,5 @@ def decommission():
   ExecuteHadoop(nn_refresh_cmd,
                 user=hdfs_user,
                 conf_dir=conf_dir,
-                kinit_override=True)
+                kinit_override=True,
+                bin_dir=params.hadoop_bin_dir)

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
index 8dae3eb..a0b07aa 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
@@ -88,7 +88,7 @@ class NameNode(Script):
     
     
     def startRebalancingProcess(threshold):
-      rebalanceCommand = format('hadoop --config {hadoop_conf_dir} balancer -threshold {threshold}')
+      rebalanceCommand = format('export PATH=$PATH:{hadoop_bin_dir} ; hadoop --config {hadoop_conf_dir} balancer -threshold {threshold}')
       return ['su','-',params.hdfs_user,'-c', rebalanceCommand]
     
     command = startRebalancingProcess(threshold)

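One detail worth noting in the rebalance command: 'su -' starts a login shell that discards the caller's environment, so an environment= argument on the parent process would be lost, and the PATH export has to travel inside the quoted command string. Reduced to its essentials:

# 'su -' wipes the environment; the export lives inside the command string
rebalance_cmd = format('export PATH=$PATH:{hadoop_bin_dir} ; '
                       'hadoop --config {hadoop_conf_dir} balancer -threshold {threshold}')
command = ['su', '-', params.hdfs_user, '-c', rebalance_cmd]
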
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 47ee8ca..60198c7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -24,6 +24,28 @@ import os
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_conf_empty_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf.empty")
+  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
+  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  limits_conf_dir = format("/usr/hdp/{rpm_version}/etc/security/limits.d")
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_bin = "/usr/lib/hadoop/sbin"
+  hadoop_bin_dir = "/usr/bin"
+  limits_conf_dir = "/etc/security/limits.d"
+
+execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 ulimit_cmd = "ulimit -c unlimited; "
 
 #security params
@@ -100,9 +122,7 @@ proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 nagios_group = config['configurations']['nagios-env']['nagios_group']
 
 #hadoop params
-hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix
-hadoop_bin = "/usr/lib/hadoop/sbin"
 
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
@@ -110,8 +130,6 @@ hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger'
 dfs_domain_socket_path = config['configurations']['hdfs-site']['dfs.domain.socket.path']
 dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path)
 
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-
 jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']
 
 dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
@@ -171,11 +189,10 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )
 
-limits_conf_dir = "/etc/security/limits.d"
-
 io_compression_codecs = config['configurations']['core-site']['io.compression.codecs']
 if not "com.hadoop.compression.lzo" in io_compression_codecs:
   exclude_packages = ["lzo", "hadoop-lzo", "hadoop-lzo-native", "liblzo2-2"]
@@ -184,8 +201,6 @@ else:
 name_node_params = default("/commandParams/namenode", None)
 
 #hadoop params
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
-
 hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
 
 #hadoop-env.sh
@@ -209,5 +224,4 @@ ttnode_heapsize = "1024m"
 
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
index 66f2ae1..18f58bd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
@@ -31,13 +31,14 @@ class HdfsServiceCheck(Script):
 
     safemode_command = "dfsadmin -safemode get | grep OFF"
 
-    create_dir_cmd = format("fs -mkdir {dir} ; hadoop fs -chmod 777 {dir}")
-    test_dir_exists = format("hadoop fs -test -e {dir}")
+    create_dir_cmd = format("fs -mkdir {dir}")
+    chmod_command = format("fs -chmod 777 {dir}")
+    test_dir_exists = format("hadoop --config {hadoop_conf_dir} fs -test -e {dir}")
     cleanup_cmd = format("fs -rm {tmp_file}")
    #cleanup put below to handle retries; if retrying there will be a stale file
     #that needs cleanup; exit code is fn of second command
     create_file_cmd = format(
-      "{cleanup_cmd}; hadoop fs -put /etc/passwd {tmp_file}")
+      "{cleanup_cmd}; hadoop --config {hadoop_conf_dir} fs -put /etc/passwd {tmp_file}")
     test_cmd = format("fs -test -e {tmp_file}")
     if params.security_enabled:
       Execute(format(
@@ -48,7 +49,8 @@ class HdfsServiceCheck(Script):
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
-                  tries=20
+                  tries=20,
+                  bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(create_dir_cmd,
                   user=params.smoke_user,
@@ -56,21 +58,32 @@ class HdfsServiceCheck(Script):
                   not_if=test_dir_exists,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
-                  tries=5
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
+    )
+    ExecuteHadoop(chmod_command,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(create_file_cmd,
                   user=params.smoke_user,
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
-                  tries=5
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(test_cmd,
                   user=params.smoke_user,
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
-                  tries=5
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
     if params.has_journalnode_hosts:
       journalnode_port = params.journalnode_port

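The old create_dir_cmd chained two commands with ';', but ExecuteHadoop prefixes 'hadoop --config <conf_dir>' (and now the bin_dir PATH handling) only onto the string it is given, so the second half of a compound command escaped the managed prefix. Splitting into one resource per command keeps both under the same conf_dir and bin_dir; roughly:

# each resource expands to approximately:
#   hadoop --config <conf_dir> <command>     (with bin_dir on the PATH)
ExecuteHadoop(format("fs -mkdir {dir}"),
              conf_dir=params.hadoop_conf_dir, bin_dir=params.hadoop_bin_dir)
ExecuteHadoop(format("fs -chmod 777 {dir}"),
              conf_dir=params.hadoop_conf_dir, bin_dir=params.hadoop_bin_dir)
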
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
index 53a62ce..eb05481 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
@@ -25,6 +25,12 @@ import sys
 def hcat():
   import params
 
+  Directory(params.hive_conf_dir,
+            owner=params.hcat_user,
+            group=params.user_group,
+  )
+
+
   Directory(params.hcat_conf_dir,
             owner=params.hcat_user,
             group=params.user_group,

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
index ec8faa9..ede7e27 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
@@ -45,6 +45,7 @@ def hcat_service_check():
             user=params.smokeuser,
             try_sleep=5,
            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
+            environment = {'PATH' : params.execute_path},
             logoutput=True)
 
     if params.security_enabled:
@@ -55,7 +56,8 @@ def hcat_service_check():
                     security_enabled=params.security_enabled,
                     kinit_path_local=params.kinit_path_local,
                     keytab=params.hdfs_user_keytab,
-                    principal=params.hdfs_principal_name
+                    principal=params.hdfs_principal_name,
+                    bin_dir=params.hive_bin
       )
     else:
       ExecuteHadoop(test_cmd,
@@ -64,7 +66,8 @@ def hcat_service_check():
                     conf_dir=params.hadoop_conf_dir,
                     security_enabled=params.security_enabled,
                     kinit_path_local=params.kinit_path_local,
-                    keytab=params.hdfs_user_keytab
+                    keytab=params.hdfs_user_keytab,
+                    bin_dir=params.hive_bin
       )
 
     cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
@@ -72,6 +75,7 @@ def hcat_service_check():
     Execute(cleanup_cmd,
             tries=3,
             user=params.smokeuser,
+            environment = {'PATH' : params.execute_path },
             try_sleep=5,
            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
             logoutput=True

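The two ExecuteHadoop branches above run the same test command and differ only in the Kerberos identity used for the kinit that the resource performs on secure clusters. Collapsed into one call, the intent is (a sketch, not the literal code):

kerberos_kwargs = {'security_enabled': params.security_enabled,
                   'kinit_path_local': params.kinit_path_local,
                   'keytab': params.hdfs_user_keytab}
if params.security_enabled:
  kerberos_kwargs['principal'] = params.hdfs_principal_name  # explicit principal when secure
ExecuteHadoop(test_cmd, conf_dir=params.hadoop_conf_dir,
              bin_dir=params.hive_bin, **kerberos_kwargs)
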
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
index 0b7fcb4..e6e5eb8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
@@ -188,6 +188,7 @@ def jdbc_connector():
     Execute(cmd,
             not_if=format("test -f {target}"),
             creates=params.target,
+            environment= {'PATH' : params.execute_path },
             path=["/bin", "/usr/bin/"])
   elif params.hive_jdbc_driver == "org.postgresql.Driver":
     cmd = format("hive mkdir -p {artifact_dir} ; cp /usr/share/java/{jdbc_jar_name} {target}")
@@ -195,6 +196,7 @@ def jdbc_connector():
     Execute(cmd,
             not_if=format("test -f {target}"),
             creates=params.target,
+            environment= {'PATH' : params.execute_path },
             path=["/bin", "usr/bin/"])
 
   elif params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
index 8507816..d88d0b0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
@@ -49,6 +49,7 @@ def hive_service(
     
     Execute(demon_cmd,
             user=params.hive_user,
+            environment= {'PATH' : params.execute_path, 'HADOOP_HOME' : params.hadoop_home },
             not_if=process_id_exists
     )
 
@@ -103,8 +104,10 @@ def hive_service(
 def check_fs_root():
   import params  
   fs_root_url = format("{fs_root}{hive_apps_whs_dir}")
-  cmd = "/usr/lib/hive/bin/metatool -listFSRoot 2>/dev/null | grep hdfs://"
+  cmd = format("metatool -listFSRoot 2>/dev/null | grep hdfs://")
   code, out = call(cmd, user=params.hive_user)
   if code == 0 and fs_root_url.strip() != out.strip():
-    cmd = format("/usr/lib/hive/bin/metatool -updateLocation {fs_root}{hive_apps_whs_dir} {out}")
-    Execute(cmd, user=params.hive_user)
\ No newline at end of file
+    cmd = format("metatool -updateLocation {fs_root}{hive_apps_whs_dir} {out}")
+    Execute(cmd,
+            environment= {'PATH' : params.execute_path },
+            user=params.hive_user)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
index b6d542d..3548de7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
@@ -69,7 +69,8 @@ def install_tez_jars():
                     owner=params.tez_user,
                     dest_dir=app_dir_path,
                     kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user
+                    hdfs_user=params.hdfs_user,
+                    hadoop_conf_dir=params.hadoop_conf_dir
       )
     pass
 
@@ -79,7 +80,8 @@ def install_tez_jars():
                     owner=params.tez_user,
                     dest_dir=lib_dir_path,
                     kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user
+                    hdfs_user=params.hdfs_user,
+                    hadoop_conf_dir=params.hadoop_conf_dir
       )
     pass
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
index b1a4a49..a38c12a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
@@ -26,6 +26,53 @@ import os
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+hdp_stack_version = config['hostLevelParams']['stack_version']
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  hive_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
+  hive_client_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
+  hive_server_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf.server')
+  hive_bin = format('/usr/hdp/{rpm_version}/hive/bin')
+  hive_lib = format('/usr/hdp/{rpm_version}/hive/lib')
+  tez_local_api_jars = format('/usr/hdp/{rpm_version}/tez/tez*.jar')
+  tez_local_lib_jars = format('/usr/hdp/{rpm_version}/tez/lib/*.jar')
+
+  if str(hdp_stack_version).startswith('2.0'):
+    hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
+    hcat_lib = format('/usr/hdp/{rpm_version}/hive/hcatalog/share/hcatalog')
+  # for newer versions
+  else:
+    hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive-hcatalog/conf')
+    hcat_lib = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/share/hcatalog')
+
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_home = '/usr'
+  hive_conf_dir = "/etc/hive/conf"
+  hive_bin = '/usr/lib/hive/bin'
+  hive_lib = '/usr/lib/hive/lib/'
+  hive_client_conf_dir = "/etc/hive/conf"
+  hive_server_conf_dir = '/etc/hive/conf.server'
+  tez_local_api_jars = '/usr/lib/tez/tez*.jar'
+  tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+
+  if str(hdp_stack_version).startswith('2.0'):
+    hcat_conf_dir = '/etc/hcatalog/conf'
+    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+  # for newer versions
+  else:
+    hcat_conf_dir = '/etc/hive-hcatalog/conf'
+    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
+
+execute_path = os.environ['PATH'] + os.pathsep + hive_bin
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
 
@@ -34,7 +81,6 @@ hive_metastore_db_type = config['configurations']['hive-env']['hive_database_typ
 
 #users
 hive_user = config['configurations']['hive-env']['hive_user']
-hive_lib = '/usr/lib/hive/lib/'
 #JDBC driver jar name
 hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
 if hive_jdbc_driver == "com.mysql.jdbc.Driver":
@@ -51,11 +97,9 @@ check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
 
 #common
-hdp_stack_version = config['hostLevelParams']['stack_version']
 hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
 hive_var_lib = '/var/lib/hive'
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-hive_bin = '/usr/lib/hive/bin'
 hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
 hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
 hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
@@ -77,8 +121,6 @@ hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
 hive_pid_dir = status_params.hive_pid_dir
 hive_pid = status_params.hive_pid
 #Default conf dir for client
-hive_client_conf_dir = "/etc/hive/conf"
-hive_server_conf_dir = "/etc/hive/conf.server"
 hive_conf_dirs_list = [hive_server_conf_dir, hive_client_conf_dir]
 
 if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
@@ -92,8 +134,6 @@ hive_database_name = config['configurations']['hive-env']['hive_database_name']
 #Starting hiveserver2
 start_hiveserver2_script = 'startHiveserver2.sh.j2'
 
-hadoop_home = '/usr'
-
 ##Starting metastore
 start_metastore_script = 'startMetastore.sh'
 hive_metastore_pid = status_params.hive_metastore_pid
@@ -133,15 +173,6 @@ else:
 
 ########## HCAT
 
-if str(hdp_stack_version).startswith('2.0'):
-  hcat_conf_dir = '/etc/hcatalog/conf'
-  hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
-# for newer versions
-else:
-  hcat_conf_dir = '/etc/hive-hcatalog/conf'
-  hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
-
-
 hcat_dbroot = hcat_lib
 
 hcat_user = config['configurations']['hive-env']['hcat_user']
@@ -150,8 +181,6 @@ webhcat_user = config['configurations']['hive-env']['webhcat_user']
 hcat_pid_dir = status_params.hcat_pid_dir
 hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
 
-hadoop_conf_dir = '/etc/hadoop/conf'
-
 #hive-log4j.properties.template
 if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
   log4j_props = config['configurations']['hive-log4j']['content']
@@ -172,7 +201,6 @@ hive_hdfs_user_mode = 0700
 hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -180,8 +208,6 @@ kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
-tez_local_api_jars = '/usr/lib/tez/tez*.jar'
-tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
 tez_user = config['configurations']['tez-env']['tez_user']
 
 if System.get_instance().os_family == "ubuntu":
@@ -205,13 +231,12 @@ else:
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
 HdfsDirectory = functools.partial(
   HdfsDirectory,
   conf_dir=hadoop_conf_dir,
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )

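execute_path is built once here and passed as environment={'PATH': execute_path} wherever hive, metatool or hcat binaries are invoked. os.pathsep keeps the join portable, and appending rather than replacing preserves the system PATH, so the versioned tools resolve even when absent from /usr/bin. With illustrative values:

import os

hive_bin = '/usr/hdp/2.1.0.0/hive/bin'  # illustrative; set by the branch above
execute_path = os.environ['PATH'] + os.pathsep + hive_bin
# e.g. '/usr/sbin:/usr/bin:/usr/hdp/2.1.0.0/hive/bin'
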
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2
index a8fe21c..3ddf50f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2
@@ -25,5 +25,5 @@ HIVE_SERVER2_OPTS=" -hiveconf hive.log.file=hiveserver2.log -hiveconf hive.log.d
 HIVE_SERVER2_OPTS="${HIVE_SERVER2_OPTS} -hiveconf hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator -hiveconf hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory "
 {% endif %}
 
-HIVE_CONF_DIR=$4 /usr/lib/hive/bin/hiveserver2 -hiveconf hive.metastore.uris=" " ${HIVE_SERVER2_OPTS} > $1 2> $2 &
+HIVE_CONF_DIR=$4 {{hive_bin}}/hiveserver2 -hiveconf hive.metastore.uris=" " ${HIVE_SERVER2_OPTS} > $1 2> $2 &
 echo $!|cat>$3

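{{hive_bin}} is resolved when Ambari renders the .j2 template at deploy time, so one template serves both the versioned and the legacy layout. Rendering is equivalent to the following sketch, using jinja2 directly (Ambari wraps this in its Template resource):

from jinja2 import Template

line = 'HIVE_CONF_DIR=$4 {{hive_bin}}/hiveserver2 ... > $1 2> $2 &'
print(Template(line).render(hive_bin='/usr/hdp/2.1.0.0/hive/bin'))  # illustrative version
# -> HIVE_CONF_DIR=$4 /usr/hdp/2.1.0.0/hive/bin/hiveserver2 ... > $1 2> $2 &
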
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
index fc47a70..9631f0d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
@@ -122,7 +122,7 @@ export OOZIE_ADMIN_PORT={{oozie_server_admin_port}}
 # The base URL for callback URLs to Oozie
 #
 # export OOZIE_BASE_URL="http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie"
-export JAVA_LIBRARY_PATH=/usr/lib/hadoop/lib/native/Linux-amd64-64
+export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-amd64-64
     </value>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
index 0a80d0f..6d43880 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
@@ -93,10 +93,10 @@ else
   kinitcmd=""
 fi
 
-su - ${smoke_test_user} -c "hdfs dfs -rm -r examples"
-su - ${smoke_test_user} -c "hdfs dfs -rm -r input-data"
-su - ${smoke_test_user} -c "hdfs dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
-su - ${smoke_test_user} -c "hdfs dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
+su - ${smoke_test_user} -c "hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
+su - ${smoke_test_user} -c "hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
+su - ${smoke_test_user} -c "hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+su - ${smoke_test_user} -c "hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
 
 cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
 echo $cmd

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
index 78661b0..bbbedbe 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
@@ -37,7 +37,7 @@ def oozie_service(action = 'start'): # 'start' or 'stop'
       db_connection_check_command = None
       
     cmd1 =  format("cd {oozie_tmp_dir} && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run")
-    cmd2 =  format("{kinit_if_needed} {put_shared_lib_to_hdfs_cmd} ; hadoop dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
+    cmd2 =  format("{kinit_if_needed} {put_shared_lib_to_hdfs_cmd} ; hadoop --config {hadoop_conf_dir} dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
 
     if not os.path.isfile(params.jdbc_driver_jar) and params.jdbc_driver_name == "org.postgresql.Driver":
       print "ERROR: jdbc file " + params.jdbc_driver_jar + " is unavailable. Please, follow next steps:\n" \
@@ -58,7 +58,7 @@ def oozie_service(action = 'start'): # 'start' or 'stop'
     
     Execute( cmd2,
       user = params.oozie_user,       
-      not_if = format("{kinit_if_needed} hadoop dfs -ls /user/oozie/share | awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if (count > 0) {{exit 0}} else {{exit 1}}}}'")
+      not_if = format("{kinit_if_needed} hadoop --config {hadoop_conf_dir} dfs -ls /user/oozie/share | awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if (count > 0) {{exit 0}} else {{exit 1}}}}'")
     )
     
     Execute( start_cmd,

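The doubled braces in the not_if string are format() escapes that emit literal { and } for awk; the guard asks whether /user/oozie/share already has entries and exits 0 (skipping the copy) when at least one listing line matches /share/. The same test in plain Python:

def share_lib_present(listing_lines):
  # mirrors: awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'
  return any('share' in line for line in listing_lines)
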
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
index a484c0e..ac26ede 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
@@ -25,15 +25,28 @@ import status_params
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hadoop_lib_home = format("/usr/hdp/{rpm_version}/hadoop/lib")
+  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_lib_home = "/usr/lib/hadoop/lib"
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 conf_dir = "/etc/oozie/conf"
-hadoop_conf_dir = "/etc/hadoop/conf"
 user_group = config['configurations']['cluster-env']['user_group']
 jdk_location = config['hostLevelParams']['jdk_location']
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
-hadoop_prefix = "/usr"
 oozie_tmp_dir = "/var/tmp/oozie"
 oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_pid_dir = status_params.oozie_pid_dir
@@ -53,7 +66,6 @@ oozie_keytab = config['configurations']['oozie-env']['oozie_keytab']
 oozie_env_sh_template = config['configurations']['oozie-env']['content']
 
 oracle_driver_jar_name = "ojdbc6.jar"
-java_share_dir = "/usr/share/java"
 
 java_home = config['hostLevelParams']['java_home']
 oozie_metastore_user_name = config['configurations']['oozie-site']['oozie.service.JPAService.jdbc.username']
@@ -71,7 +83,7 @@ oozie_shared_lib = "/usr/lib/oozie/share"
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 if str(hdp_stack_version).startswith('2.0') or str(hdp_stack_version).startswith('2.1'):
-  put_shared_lib_to_hdfs_cmd = format("hadoop dfs -put {oozie_shared_lib} {oozie_hdfs_user_dir}")
+  put_shared_lib_to_hdfs_cmd = format("hadoop --config {hadoop_conf_dir} dfs -put {oozie_shared_lib} {oozie_hdfs_user_dir}")
 # for newer
 else:
   put_shared_lib_to_hdfs_cmd = format("{oozie_setup_sh} sharelib create -fs {fs_root} -locallib {oozie_shared_lib}")
@@ -103,7 +115,6 @@ oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_hdfs_user_mode = 0775
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -117,5 +128,6 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
index 1b522b8..d1f8b75 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
@@ -25,8 +25,23 @@ from resource_management import *
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-pig_conf_dir = "/etc/pig/conf"
-hadoop_conf_dir = "/etc/hadoop/conf"
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  pig_conf_dir = format('/usr/hdp/{rpm_version}/etc/pig/conf')
+  pig_bin_dir = format('/usr/hdp/{rpm_version}/pig/bin')
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_home = '/usr'
+  pig_conf_dir = "/etc/pig/conf"
+  pig_bin_dir = ""
+
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
@@ -38,7 +53,6 @@ pig_env_sh_template = config['configurations']['pig-env']['content']
 
 # not supporting 32 bit jdk.
 java64_home = config['hostLevelParams']['java_home']
-hadoop_home = "/usr"
 
 pig_properties = config['configurations']['pig-properties']['content']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py
index 8431b6d..7619bd6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py
@@ -31,7 +31,7 @@ class PigServiceCheck(Script):
 
     cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
    #cleanup put below to handle retries; if retrying there will be a stale file that needs cleanup; exit code is fn of second command
-    create_file_cmd = format("{cleanup_cmd}; hadoop dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
+    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
     test_cmd = format("fs -test -e {output_file}")
 
     ExecuteHadoop( create_file_cmd,
@@ -42,7 +42,8 @@ class PigServiceCheck(Script):
       # for kinit run
       keytab = params.smoke_user_keytab,
       security_enabled = params.security_enabled,
-      kinit_path_local = params.kinit_path_local
+      kinit_path_local = params.kinit_path_local,
+      bin_dir = params.hadoop_bin_dir
     )
 
     File( format("{tmp_dir}/pigSmoke.sh"),
@@ -53,13 +54,14 @@ class PigServiceCheck(Script):
     Execute( format("pig {tmp_dir}/pigSmoke.sh"),
       tries     = 3,
       try_sleep = 5,
-      path      = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+      path      = format('{pig_bin_dir}:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
       user      = params.smokeuser
     )
 
     ExecuteHadoop( test_cmd,
       user      = params.smokeuser,
-      conf_dir = params.hadoop_conf_dir
+      conf_dir = params.hadoop_conf_dir,
+      bin_dir = params.hadoop_bin_dir
     )
 
 if __name__ == "__main__":

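The bin_dir argument threaded into ExecuteHadoop above amounts to qualifying the hadoop binary with the versioned bin directory while keeping the explicit --config flag. A rough, illustrative command assembly (not the real ExecuteHadoop implementation; the paths are assumed examples):

import os

def build_hadoop_cmd(sub_cmd, conf_dir, bin_dir=None):
    # Prefix the hadoop executable with bin_dir when a versioned
    # install is in play; otherwise fall back to plain PATH lookup.
    hadoop = os.path.join(bin_dir, "hadoop") if bin_dir else "hadoop"
    return "{0} --config {1} {2}".format(hadoop, conf_dir, sub_cmd)

print(build_hadoop_cmd("fs -test -e /user/ambari-qa/pigsmoke.out",
                       conf_dir="/etc/hadoop/conf",
                       bin_dir="/usr/hdp/2.2.0.0/hadoop/bin"))
# -> /usr/hdp/2.2.0.0/hadoop/bin/hadoop --config /etc/hadoop/conf fs -test -e ...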
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
index 144a587..9170fdc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
@@ -21,6 +21,15 @@ from resource_management import *
 
 config = Script.get_config()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  zoo_conf_dir = format('/usr/hdp/{rpm_version}/etc/zookeeper')
+else:
+  zoo_conf_dir = "/etc/zookeeper"
+
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 user_group = config['configurations']['cluster-env']['user_group']
@@ -29,7 +38,6 @@ sqoop_env_sh_template = config['configurations']['sqoop-env']['content']
 sqoop_conf_dir = "/usr/lib/sqoop/conf"
 hbase_home = "/usr"
 hive_home = "/usr"
-zoo_conf_dir = "/etc/zookeeper"
 sqoop_lib = "/usr/lib/sqoop/lib"
 sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
index 304bbb7..1dba691 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
@@ -47,7 +47,7 @@ CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
 #HCAT_PREFIX=hive_prefix
 
 # Set HADOOP_HOME to point to a specific hadoop install directory
-export HADOOP_HOME=/usr/lib/hadoop
+export HADOOP_HOME={{hadoop_home}}
     </value>
   </property>
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
index a7959f0..f37ac27 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
@@ -26,16 +26,36 @@ import status_params
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
-if str(config['hostLevelParams']['stack_version']).startswith('2.0'):
-  config_dir = '/etc/hcatalog/conf'
-  webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
-# for newer versions
+#hadoop params
+hdp_stack_version = config['hostLevelParams']['stack_version']
+if rpm_version is not None:
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  hadoop_streeming_jars = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/hadoop-streaming-*.jar")
+  if str(hdp_stack_version).startswith('2.0'):
+    config_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
+    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hcatalog/sbin')
+  # for newer versions
+  else:
+    config_dir = format('/usr/hdp/{rpm_version}/etc/hive-webhcat/conf')
+    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/sbin')
 else:
-  config_dir = '/etc/hive-webhcat/conf'
-  webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_home = '/usr'
+  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
+  if str(hdp_stack_version).startswith('2.0'):
+    config_dir = '/etc/hcatalog/conf'
+    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
+  # for newer versions
+  else:
+    config_dir = '/etc/hive-webhcat/conf'
+    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
+
+hcat_user = config['configurations']['hive-env']['hcat_user']
+webhcat_user = config['configurations']['hive-env']['webhcat_user']
 
 webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
 templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
@@ -46,7 +66,6 @@ pid_file = status_params.pid_file
 hadoop_conf_dir = config['configurations']['webhcat-site']['templeton.hadoop.conf.dir']
 templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
 
-hadoop_home = '/usr'
 user_group = config['configurations']['cluster-env']['user_group']
 
 webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
@@ -64,7 +83,6 @@ webhcat_hdfs_user_mode = 0755
 webhcat_apps_dir = "/apps/webhcat"
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 security_param = "true" if security_enabled else "false"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
@@ -79,5 +97,6 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )

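Note the functools.partial idiom at the end of the hunk: the environment-specific arguments (keytab, kinit path, and now bin_dir) are pre-bound once, so each call site only names the directory to create. A minimal sketch with a stand-in for the real HdfsDirectory resource (sample values assumed, security arguments elided):

import functools

def hdfs_directory(path, conf_dir, hdfs_user, bin_dir, **kwargs):
    # Stand-in for the real HdfsDirectory resource: just report which
    # pre-bound arguments a call would receive.
    print("mkdir {0} (user={1}, conf={2}, bin={3})".format(
        path, hdfs_user, conf_dir, bin_dir))

# Pre-bind the environment-specific arguments once, as params.py does.
HdfsDirectory = functools.partial(
    hdfs_directory,
    conf_dir="/etc/hadoop/conf",   # assumed sample values
    hdfs_user="hdfs",
    bin_dir="/usr/bin",
)

HdfsDirectory("/apps/webhcat")     # call sites stay short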
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
index 3092735..c56ae5f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
@@ -84,12 +84,13 @@ def webhcat():
             path='/bin'
     )
 
-  CopyFromLocal('/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
+  CopyFromLocal(params.hadoop_streeming_jars,
                 owner=params.webhcat_user,
                 mode=0755,
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
+                hdfs_user=params.hdfs_user,
+                hadoop_conf_dir=params.hadoop_conf_dir
   )
 
   CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
@@ -97,7 +98,8 @@ def webhcat():
                 mode=0755,
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
+                hdfs_user=params.hdfs_user,
+                hadoop_conf_dir=params.hadoop_conf_dir
   )
 
   CopyFromLocal('/usr/share/HDP-webhcat/hive.tar.gz',
@@ -105,5 +107,6 @@ def webhcat():
                 mode=0755,
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
+                hdfs_user=params.hdfs_user,
+                hadoop_conf_dir=params.hadoop_conf_dir
   )

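Threading hadoop_conf_dir into CopyFromLocal gives the same --config plumbing as in the service checks. Roughly (an illustrative reduction, not the actual CopyFromLocal resource, which also applies the owner/mode arguments shown above):

def copy_from_local(src, dest_dir, hadoop_conf_dir, kinnit_if_needed=""):
    # Run kinit first when security is enabled, then push the local
    # artifact into HDFS using the explicit config directory.
    put = "hadoop --config {0} fs -copyFromLocal {1} {2}".format(
        hadoop_conf_dir, src, dest_dir)
    return (kinnit_if_needed + " " + put).strip()

print(copy_from_local("/usr/share/HDP-webhcat/pig.tar.gz", "/apps/webhcat",
                      hadoop_conf_dir="/etc/hadoop/conf"))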
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
index 313ed94..f8d670e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
@@ -18,6 +18,7 @@ limitations under the License.
 Ambari Agent
 
 """
+import os
 
 from resource_management import *
 import status_params
@@ -26,7 +27,34 @@ import status_params
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-config_dir = "/etc/hadoop/conf"
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  limits_conf_dir = format("/usr/hdp/{rpm_version}/etc/security/limits.d")
+  hadoop_yarn_home = format('/usr/hdp/{rpm_version}/hadoop-yarn')
+  hadoop_mapred2_jar_location = format('/usr/hdp/{rpm_version}/hadoop-mapreduce')
+  mapred_bin = format('/usr/hdp/{rpm_version}/hadoop-mapreduce/sbin')
+  yarn_bin = format('/usr/hdp/{rpm_version}/hadoop-yarn/sbin')
+  yarn_container_bin = format('/usr/hdp/{rpm_version}/hadoop-yarn/bin')
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_bin = "/usr/lib/hadoop/sbin"
+  hadoop_bin_dir = "/usr/bin"
+  limits_conf_dir = "/etc/security/limits.d"
+  hadoop_yarn_home = '/usr/lib/hadoop-yarn'
+  hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
+  mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
+  yarn_bin = "/usr/lib/hadoop-yarn/sbin"
+  yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
+
+execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 
 ulimit_cmd = "ulimit -c unlimited;"
 
@@ -49,8 +77,6 @@ rm_nodes_exclude_path = default("/configurations/yarn-site/yarn.resourcemanager.
 java64_home = config['hostLevelParams']['java_home']
 hadoop_ssl_enabled = default("/configurations/core-site/hadoop.ssl.enabled", False)
 
-hadoop_libexec_dir = '/usr/lib/hadoop/libexec'
-hadoop_yarn_home = '/usr/lib/hadoop-yarn'
 yarn_heapsize = config['configurations']['yarn-env']['yarn_heapsize']
 resourcemanager_heapsize = config['configurations']['yarn-env']['resourcemanager_heapsize']
 nodemanager_heapsize = config['configurations']['yarn-env']['nodemanager_heapsize']
@@ -77,8 +103,6 @@ hs_webui_address = config['configurations']['mapred-site']['mapreduce.jobhistory
 nm_local_dirs = config['configurations']['yarn-site']['yarn.nodemanager.local-dirs']
 nm_log_dirs = config['configurations']['yarn-site']['yarn.nodemanager.log-dirs']
 
-
-hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
 distrAppJarName = "hadoop-yarn-applications-distributedshell-2.*.jar"
 hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"
 
@@ -90,13 +114,7 @@ yarn_log_dir = format("{yarn_log_dir_prefix}/{yarn_user}")
 mapred_job_summary_log = format("{mapred_log_dir_prefix}/{mapred_user}/hadoop-mapreduce.jobsummary.log")
 yarn_job_summary_log = format("{yarn_log_dir_prefix}/{yarn_user}/hadoop-mapreduce.jobsummary.log")
 
-mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
-yarn_bin = "/usr/lib/hadoop-yarn/sbin"
-
 user_group = config['configurations']['cluster-env']['user_group']
-limits_conf_dir = "/etc/security/limits.d"
-hadoop_conf_dir = "/etc/hadoop/conf"
-yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 
 #exclude file
 exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
@@ -128,7 +146,6 @@ jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize",
 
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -142,11 +159,11 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )
 update_exclude_file_only = config['commandParams']['update_exclude_file_only']
 
-hadoop_bin = "/usr/lib/hadoop/sbin"
 mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 #taskcontroller.cfg

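The new execute_path is what lets the unqualified yarn invocations in the resourcemanager.py and service_check.py hunks below resolve correctly. A standalone sketch of the same environment plumbing (the versioned dir is an assumed example; a POSIX shell is assumed for the echo):

import os
import subprocess

hadoop_bin_dir = "/usr/hdp/2.2.0.0/hadoop/bin"
execute_path = os.environ["PATH"] + os.pathsep + hadoop_bin_dir

# Rough equivalent of Execute(cmd, environment={'PATH': execute_path}):
# the child inherits the augmented PATH, so a bare `yarn` or `hadoop`
# resolves from the versioned bin dir without hardcoding /usr/bin.
child_env = dict(os.environ, PATH=execute_path)
subprocess.call("echo $PATH", shell=True, env=child_env)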
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
index af678d0..4d40d68 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
@@ -78,10 +78,10 @@ class Resourcemanager(Script):
     env.set_params(params)
     rm_kinit_cmd = params.rm_kinit_cmd
     yarn_user = params.yarn_user
-    conf_dir = params.config_dir
+    conf_dir = params.hadoop_conf_dir
     user_group = params.user_group
 
-    yarn_refresh_cmd = format("{rm_kinit_cmd} /usr/bin/yarn --config {conf_dir} rmadmin -refreshNodes")
+    yarn_refresh_cmd = format("{rm_kinit_cmd} yarn --config {conf_dir} rmadmin -refreshNodes")
 
     File(params.exclude_file_path,
          content=Template("exclude_hosts_list.j2"),
@@ -91,6 +91,7 @@ class Resourcemanager(Script):
 
     if params.update_exclude_file_only == False:
       Execute(yarn_refresh_cmd,
+            environment= {'PATH' : params.execute_path },
             user=yarn_user)
       pass
     pass

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py
index 42a7138..466f637 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py
@@ -35,7 +35,7 @@ def service(componentName, action='start', serviceName='yarn'):
     pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-{componentName}.pid")
     usr = params.yarn_user
 
-  cmd = format("export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {daemon} --config {config_dir}")
+  cmd = format("export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {daemon} --config {hadoop_conf_dir}")
 
   if action == 'start':
     daemon_cmd = format("{ulimit_cmd} {cmd} start {componentName}")

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
index 2ed67ab..7e535a5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
@@ -27,7 +27,7 @@ class ServiceCheck(Script):
     import params
     env.set_params(params)
 
-    run_yarn_check_cmd = "/usr/bin/yarn node -list"
+    run_yarn_check_cmd = format("yarn --config {hadoop_conf_dir} node -list")
 
     component_type = 'rm'
     if params.hadoop_ssl_enabled:
@@ -60,6 +60,7 @@ class ServiceCheck(Script):
     )
 
     Execute(run_yarn_check_cmd,
+            environment= {'PATH' : params.execute_path },
             user=params.smokeuser
     )
 


[05/30] git commit: AMBARI-7233. Admin View: misc UI improvements. (yusaku)

Posted by jo...@apache.org.
AMBARI-7233. Admin View: misc UI improvements. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d961ca00
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d961ca00
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d961ca00

Branch: refs/heads/branch-alerts-dev
Commit: d961ca0049aaf756616a91220f5d9878f7518f2b
Parents: 7d9feb6
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Thu Sep 11 12:49:27 2014 -0700
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Sep 11 12:57:11 2014 -0700

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/index.html  |  7 +++--
 .../resources/ui/admin-web/app/scripts/app.js   |  3 +-
 .../controllers/ambariViews/ViewsEditCtrl.js    |  7 +----
 .../controllers/groups/GroupsEditCtrl.js        |  8 +++---
 .../app/scripts/controllers/mainCtrl.js         |  2 +-
 .../scripts/controllers/users/UsersShowCtrl.js  | 18 ++++++++----
 .../ui/admin-web/app/scripts/i18n.config.js     | 29 ++++++++++++++++++++
 .../resources/ui/admin-web/app/styles/main.css  |  9 ++++++
 .../admin-web/app/views/ambariViews/edit.html   | 10 +++----
 .../app/views/clusters/manageAccess.html        |  2 +-
 .../ui/admin-web/app/views/groups/edit.html     |  4 +--
 .../ui/admin-web/app/views/users/show.html      |  8 +++---
 .../src/main/resources/ui/admin-web/bower.json  |  4 ++-
 13 files changed, 78 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/index.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/index.html b/ambari-admin/src/main/resources/ui/admin-web/app/index.html
index e7a8371..7ff0638 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/index.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/index.html
@@ -29,6 +29,7 @@
     <!-- bower:css -->
     <link rel="stylesheet" href="bower_components/bootstrap/dist/css/bootstrap.css" />
     <link rel="stylesheet" href="bower_components/angular-bootstrap-toggle-switch/style/bootstrap3/angular-toggle-switch-bootstrap-3.css" />
+    <link rel="stylesheet" href="bower_components/font-awesome/css/font-awesome.min.css"/>
     <!-- endbower -->
     <!-- endbuild -->
 
@@ -52,12 +53,12 @@
               <li>
                 <div class="btn-group" dropdown is-open="status.isopen">
                 <button type="button" class="btn btn-default dropdown-toggle navbar-btn" ng-disabled="disabled">
-                    {{currentUser}} <span class="caret"></span>
+                    <i class="fa fa-user"></i> {{currentUser}} <span class="caret"></span>
                   </button>
                   <ul class="dropdown-menu" role="menu">
                     <li><a href ng-click="about()">About</a></li>
                     <li class="divider"></li>
-                    <li><a href ng-click="signOut()">Sign Out</a></li>
+                    <li><a href ng-click="signOut()">Sign out</a></li>
                   </ul>
                 </div>
               </li>
@@ -91,6 +92,7 @@
     <script src="bower_components/lodash/dist/lodash.compat.js"></script>
     <script src="bower_components/restangular/dist/restangular.js"></script>
     <script src="bower_components/angular-bootstrap-toggle-switch/angular-toggle-switch.min.js"></script>
+    <script src="bower_components/angular-translate/angular-translate.min.js"></script>
     <!-- endbower -->
     <!-- endbuild -->
 
@@ -112,6 +114,7 @@
     <!-- build:js scripts/main.js -->
     <script src="scripts/app.js"></script>
     <script src="scripts/routes.js"></script>
+    <script src="scripts/i18n.config.js"></script>
     <script src="scripts/controllers/mainCtrl.js"></script>
     <script src="scripts/controllers/NavbarCtrl.js"></script>
     <script src="scripts/controllers/users/UsersCreateCtrl.js"></script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/scripts/app.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/app.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/app.js
index b580da7..868dafe 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/app.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/app.js
@@ -23,7 +23,8 @@ angular.module('ambariAdminConsole', [
   'ui.bootstrap',
   'restangular',
   'angularAlert',
-  'toggle-switch'
+  'toggle-switch',
+  'pascalprecht.translate'
 ])
 .constant('Settings',{
 	baseUrl: '/api/v1'

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js
index 4355f63..bfe0d14 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js
@@ -49,11 +49,7 @@ angular.module('ambariAdminConsole')
 
   // Get META for properties
   View.getMeta($routeParams.viewId, $routeParams.version).then(function(data) {
-    var meta = {};
-    angular.forEach(data.data.ViewVersionInfo.parameters, function(parameter) {
-      meta[parameter.name] = parameter;
-    });
-    $scope.configurationMeta = meta;
+    $scope.configurationMeta = data.data.ViewVersionInfo.parameters;
     reloadViewInfo();
   });
 
@@ -76,7 +72,6 @@ angular.module('ambariAdminConsole')
 
   $scope.permissions = [];
   
-  // reloadViewInfo();
   reloadViewPrivilegies();
 
   $scope.editSettingsDisabled = true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js
index a322c1d..8164240 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js
@@ -86,13 +86,13 @@ angular.module('ambariAdminConsole')
       privilegie = privilegie.PrivilegeInfo;
       if(privilegie.type === 'CLUSTER'){
         // This is cluster
-        privilegies.clusters[privilegie.cluster_name] = privilegies.clusters[privilegie.cluster_name] || '';
-        privilegies.clusters[privilegie.cluster_name] += privilegies.clusters[privilegie.cluster_name] ? ', ' + privilegie.permission_name : privilegie.permission_name;
+        privilegies.clusters[privilegie.cluster_name] = privilegies.clusters[privilegie.cluster_name] || [];
+        privilegies.clusters[privilegie.cluster_name].push(privilegie.permission_name);
       } else if ( privilegie.type === 'VIEW'){
-        privilegies.views[privilegie.instance_name] = privilegies.views[privilegie.instance_name] || { privileges:''};
+        privilegies.views[privilegie.instance_name] = privilegies.views[privilegie.instance_name] || { privileges:[]};
         privilegies.views[privilegie.instance_name].version = privilegie.version;
         privilegies.views[privilegie.instance_name].view_name = privilegie.view_name;
-        privilegies.views[privilegie.instance_name].privileges += privilegies.views[privilegie.instance_name].privileges ? ', ' + privilegie.permission_name : privilegie.permission_name;
+        privilegies.views[privilegie.instance_name].privileges.push(privilegie.permission_name);
       }
     });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/mainCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/mainCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/mainCtrl.js
index c6a118a..6f68831 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/mainCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/mainCtrl.js
@@ -39,4 +39,4 @@ angular.module('ambariAdminConsole')
   };
 
   $scope.currentUser = Auth.getCurrentUser();
-}]);
\ No newline at end of file
+}]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersShowCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersShowCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersShowCtrl.js
index f3e4240..0aaf871 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersShowCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersShowCtrl.js
@@ -122,7 +122,7 @@ angular.module('ambariAdminConsole')
 
   $scope.toggleUserActive = function() {
     if(!$scope.isCurrentUser){
-      ConfirmationModal.show('Change Status', 'Are you sure you want to change "'+ $scope.user.user_name +'" status?').then(function() {
+      ConfirmationModal.show('Change Status', 'Are you sure you want to change status for user "'+ $scope.user.user_name +'" to '+($scope.user.active ? 'inactive' : 'active')+'?').then(function() {
         User.setActive($scope.user.user_name, $scope.user.active);
       })
       .catch(function() {
@@ -132,7 +132,13 @@ angular.module('ambariAdminConsole')
   };    
   $scope.toggleUserAdmin = function() {
     if(!$scope.isCurrentUser){
-      ConfirmationModal.show('Change Admin Privilege', 'Are you sure you want to change "'+$scope.user.user_name+'" Admin privilege?').then(function() {
+      var message = '';
+      if( !$scope.user.admin ){
+        message = 'Are you sure you want to grant Admin privilege to user ';
+      } else {
+        message = 'Are you sure you want to revoke Admin privilege from user ';
+      }
+      ConfirmationModal.show('Change Admin Privilege', message + '"'+$scope.user.user_name+'"?').then(function() {
         User.setAdmin($scope.user.user_name, $scope.user.admin)
         .then(function() {
           loadPrivilegies();
@@ -164,13 +170,13 @@ angular.module('ambariAdminConsole')
         privilegie = privilegie.PrivilegeInfo;
         if(privilegie.type === 'CLUSTER'){
           // This is cluster
-          privilegies.clusters[privilegie.cluster_name] = privilegies.clusters[privilegie.cluster_name] || '';
-          privilegies.clusters[privilegie.cluster_name] += privilegies.clusters[privilegie.cluster_name] ? ', ' + privilegie.permission_name : privilegie.permission_name;
+          privilegies.clusters[privilegie.cluster_name] = privilegies.clusters[privilegie.cluster_name] || [];
+          privilegies.clusters[privilegie.cluster_name].push(privilegie.permission_name);
         } else if ( privilegie.type === 'VIEW'){
-          privilegies.views[privilegie.instance_name] = privilegies.views[privilegie.instance_name] || { privileges:''};
+          privilegies.views[privilegie.instance_name] = privilegies.views[privilegie.instance_name] || { privileges:[]};
           privilegies.views[privilegie.instance_name].version = privilegie.version;
           privilegies.views[privilegie.instance_name].view_name = privilegie.view_name;
-          privilegies.views[privilegie.instance_name].privileges += privilegies.views[privilegie.instance_name].privileges ? ', ' + privilegie.permission_name : privilegie.permission_name;
+          privilegies.views[privilegie.instance_name].privileges.push(privilegie.permission_name);
 
         }
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
new file mode 100644
index 0000000..d9f5eb5
--- /dev/null
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+'use strict';
+
+angular.module('ambariAdminConsole')
+.config(['$translateProvider', function($translateProvider) {
+  $translateProvider.translations('en',{
+    'CLUSTER.OPERATE': 'Operator',
+    'CLUSTER.READ': 'Read-Only',
+    'VIEW.USE': 'Use'
+  });
+
+  $translateProvider.preferredLanguage('en');
+}]);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index bfa0032..3d5628a 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -191,6 +191,9 @@
   ------ END editable-list DIRECTIVE SECTION ------ -
 */
 
+.tooltip-inner{
+  word-wrap: break-word;
+}
 
  .instances-table{
   table-layout: fixed;
@@ -962,3 +965,9 @@ button.btn.btn-xs{
   float: left;
   margin-right: 5px;
 }
+
+.no-animation *{
+  -webkit-transition: none!important;
+  -o-transition: none!important;
+  transition: none!important;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html
index 02d4f77..92d319d 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html
@@ -108,7 +108,7 @@
       <tbody>
         <tr ng-repeat="permission in permissions">
           <td>
-            <label class="">{{permission.PermissionInfo.permission_name}}</label>
+            <label class="" tooltip="{{permission.PermissionInfo.permission_name}}">{{permission.PermissionInfo.permission_name | translate}}</label>
           </td>
           <td>
             <editable-list items-source="permissionsEdit[permission.PermissionInfo.permission_name].USER" editable="true" resource-type="User"></editable-list>
@@ -136,11 +136,11 @@
   <div class="panel-body">
     <form name="propertiesForm" class="form-horizontal" ng-hide="isConfigurationEmpty" novalidate>
       <fieldset ng-disabled="editConfigurationDisabled">
-        <div class="form-group" ng-repeat="(propertyName, propertyValue) in configurationMeta" ng-class="{'has-error' : propertyValue.required && propertiesForm[propertyName].$error.required && !editConfigurationDisabled}">
-          <label for="" class="control-label col-sm-3" ng-class="{'not-required': !propertyValue.required}" tooltip="{{propertyValue.description}}">{{propertyName}}{{propertyValue.required ? '*' : ''}}</label>
+        <div class="form-group" ng-repeat="property in configurationMeta" ng-class="{'has-error' : property.required && propertiesForm[property.name].$error.required && !editConfigurationDisabled}">
+          <label for="" class="control-label col-sm-3" ng-class="{'not-required': !property.required}" tooltip="{{property.description}}">{{property.name}}{{property.required ? '*' : ''}}</label>
           <div class="col-sm-9">
-            <input type="{{propertyValue.masked ? 'password' : 'text'}}" class="form-control propertie-input" ng-required="propertyValue.required" name="{{propertyName}}" ng-model="configuration[propertyName]">
-            <div class="alert alert-danger no-margin-bottom top-margin" ng-show='propertyValue.required && propertiesForm[propertyName].$error.required && !editConfigurationDisabled'>
+            <input type="{{property.masked ? 'password' : 'text'}}" class="form-control propertie-input" ng-required="property.required" name="{{property.name}}" ng-model="configuration[property.name]">
+            <div class="alert alert-danger no-margin-bottom top-margin" ng-show='property.required && propertiesForm[property.name].$error.required && !editConfigurationDisabled'>
               This field is required.
             </div>
           </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/manageAccess.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/manageAccess.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/manageAccess.html
index 6de2561..d5dcfa2 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/manageAccess.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/manageAccess.html
@@ -33,7 +33,7 @@
     </thead>
     <tbody>
       <tr ng-repeat="permission in permissions">
-        <td><label class="">{{permission.PermissionInfo.permission_name}}</label></td>
+        <td><label class="" tooltip="{{permission.PermissionInfo.permission_name}}">{{permission.PermissionInfo.permission_name | translate}}</label></td>
         <td>
           <div class="" ng-switch="isEditMode">
             <editable-list items-source="permissionsEdit[permission.PermissionInfo.permission_name].USER" resource-type="User" editable="true"></editable-list>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html
index 5b90f79..f11ef9e 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html
@@ -61,7 +61,7 @@
                 <a href="#/clusters/{{name}}/manageAccess">{{name}}</a>
               </td>
               <td>
-                {{privilege}}
+                <span tooltip="{{item}}" ng-repeat="item in privilege">{{item | translate}}{{$last ? '' : ', '}}</span>
               </td>
             </tr>
             <tr ng-repeat="(name, privilege) in privileges.views">
@@ -70,7 +70,7 @@
                 <a href="#/views/{{privilege.view_name}}/versions/{{privilege.version}}/instances/{{name}}/edit">{{name}}</a>
               </td>
               <td>
-                {{privilege.privileges}}
+                <span tooltip="{{item}}" ng-repeat="item in privilege.privileges">{{item | translate}}{{$last ? '' : ', '}}</span>
               </td>
             </tr>
           </tbody>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html
index 0667200..bff097f 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html
@@ -40,13 +40,13 @@
     <div class="form-group">
       <label for="" class="col-sm-2 control-label">Status</label>
       <div class="col-sm-10">
-        <toggle-switch on-change="toggleUserActive()" disabled-tooltip="Cannot Change Status" ng-disabled="isCurrentUser" model="user.active" on-label="Active" off-label="Inactive" class="switch-primary userstatus" data-off-color="danger"></toggle-switch>
+        <toggle-switch on-change="toggleUserActive()" disabled-tooltip="Cannot Change Status" ng-disabled="isCurrentUser" model="user.active" on-label="Active" off-label="Inactive" class="switch-primary userstatus {{user ? '' : 'no-animation'}}" data-off-color="danger"></toggle-switch>
       </div>
     </div>
     <div class="form-group">
       <label for="" class="col-sm-2 control-label"><span class="glyphicon glyphicon-flash"></span> Ambari Admin</label>
       <div class="col-sm-10">
-        <toggle-switch on-change="toggleUserAdmin()" disabled-tooltip="Cannot Change Admin" ng-disabled="isCurrentUser" model="user.admin" on-label="Yes" off-label="No" class="switch-primary userstatus" data-off-color="danger"></toggle-switch>
+        <toggle-switch on-change="toggleUserAdmin()" disabled-tooltip="Cannot Change Admin" ng-disabled="isCurrentUser" model="user.admin" on-label="Yes" off-label="No" class="switch-primary userstatus {{user ? '' : 'no-animation'}}" data-off-color="danger"></toggle-switch>
       </div>
     </div>
     <div class="form-group">
@@ -83,7 +83,7 @@
                 <a href="#/clusters/{{name}}/manageAccess">{{name}}</a>
               </td>
               <td>
-                {{privilege}}
+                <span tooltip="{{item}}" ng-repeat="item in privilege">{{item | translate}}{{$last ? '' : ', '}}</span>
               </td>
             </tr>
             <tr ng-repeat="(name, privilege) in privileges.views">
@@ -92,7 +92,7 @@
                 <a href="#/views/{{privilege.view_name}}/versions/{{privilege.version}}/instances/{{name}}/edit">{{name}}</a>
               </td>
               <td>
-                {{privilege.privileges}}
+                <span tooltip="{{item}}" ng-repeat="item in privilege.privileges">{{item | translate}}{{$last ? '' : ', '}}</span> 
               </td>
             </tr>
           </tbody>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d961ca00/ambari-admin/src/main/resources/ui/admin-web/bower.json
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/bower.json b/ambari-admin/src/main/resources/ui/admin-web/bower.json
index 27429c2..896e5bd 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/bower.json
+++ b/ambari-admin/src/main/resources/ui/admin-web/bower.json
@@ -8,7 +8,9 @@
     "angular-bootstrap": "~0.11.0",
     "restangular": "~1.4.0",
     "angular-bootstrap-toggle-switch": "~0.5.1",
-    "angular-animate": "~1.2.23"
+    "angular-animate": "~1.2.23",
+    "angular-translate": "~2.2.0",
+    "font-awesome": "~4.2.0"
   },
   "devDependencies": {}
 }


[07/30] git commit: AMBARI-7212 Enable security fails for Zookeeper using HDP 1.3 (alejandro)

Posted by jo...@apache.org.
AMBARI-7212 Enable security fails for Zookeeper using HDP 1.3 (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9ed5e3be
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9ed5e3be
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9ed5e3be

Branch: refs/heads/branch-alerts-dev
Commit: 9ed5e3be948de91a5be923d880b44342343b62d9
Parents: 9d201f5
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Mon Sep 8 17:48:13 2014 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Thu Sep 11 14:09:00 2014 -0700

----------------------------------------------------------------------
 ambari-web/app/data/secure_properties.js | 15 +++++++++++++++
 1 file changed, 15 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9ed5e3be/ambari-web/app/data/secure_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/secure_properties.js b/ambari-web/app/data/secure_properties.js
index ba1e26d..a00783e 100644
--- a/ambari-web/app/data/secure_properties.js
+++ b/ambari-web/app/data/secure_properties.js
@@ -752,6 +752,21 @@ module.exports =
       "category": "ZooKeeper Server",
       "component": "ZOOKEEPER_SERVER"
     },
+    {
+      "id": "puppet var",
+      "name": "zookeeper_keytab_path",
+      "displayName": "Path to keytab file",
+      "value": "",
+      "defaultValue": "/etc/security/keytabs/zk.service.keytab",
+      "description": "Path to ZooKeeper keytab file",
+      "displayType": "directory",
+      "isVisible": true,
+      "isOverridable": false,
+      "serviceName": "ZOOKEEPER",
+      "filename": "zookeeper-env.xml",
+      "category": "ZooKeeper Server",
+      "component": "ZOOKEEPER_SERVER"
+    },
     //NAGIOS
     {
       "id": "puppet var",


[28/30] git commit: AMBARI-7288. Slider View: Create Slider App wizard config sections should have triangles (alexantonenko)

Posted by jo...@apache.org.
AMBARI-7288. Slider View: Create Slider App wizard config sections should have triangles (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4209a492
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4209a492
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4209a492

Branch: refs/heads/branch-alerts-dev
Commit: 4209a492938e541be626d524bf46181bfc8d3f9b
Parents: 85a8977
Author: Alex Antonenko <hi...@gmail.com>
Authored: Fri Sep 12 21:32:40 2014 +0300
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Sat Sep 13 01:30:50 2014 +0300

----------------------------------------------------------------------
 .../main/resources/ui/app/styles/application.less    | 15 +++++++++++++++
 .../ui/app/templates/createAppWizard/step3.hbs       |  2 +-
 .../ui/app/views/createAppWizard/step3_view.js       |  3 ++-
 3 files changed, 18 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4209a492/contrib/views/slider/src/main/resources/ui/app/styles/application.less
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/styles/application.less b/contrib/views/slider/src/main/resources/ui/app/styles/application.less
index b415da0..c193d3e 100644
--- a/contrib/views/slider/src/main/resources/ui/app/styles/application.less
+++ b/contrib/views/slider/src/main/resources/ui/app/styles/application.less
@@ -383,6 +383,16 @@ a {
       margin-bottom: 30px;
     }
   }
+  .app-wiz-configs {
+    .accordion-toggle {
+      display: block;
+    }
+    .panel-heading{
+      .icon{
+        width: 23px;
+      }
+    }
+  }
 }
 
 .app-page {
@@ -636,6 +646,11 @@ a {
   a.accordion-toggle {
     display: block;
   }
+  .panel-heading{
+    .icon{
+      width: 24px;
+    }
+  }
   .row {
     .table-row();
     textarea {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4209a492/contrib/views/slider/src/main/resources/ui/app/templates/createAppWizard/step3.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/templates/createAppWizard/step3.hbs b/contrib/views/slider/src/main/resources/ui/app/templates/createAppWizard/step3.hbs
index c7ae662..8faa52c 100644
--- a/contrib/views/slider/src/main/resources/ui/app/templates/createAppWizard/step3.hbs
+++ b/contrib/views/slider/src/main/resources/ui/app/templates/createAppWizard/step3.hbs
@@ -19,7 +19,7 @@
 <p>
   {{t wizard.step3.header}}
 </p>
-<div {{bind-attr class="controller.isError:has-error :form-group"}}>
+<div {{bind-attr class="controller.isError:has-error :form-group :app-wiz-configs"}}>
   {{#each controller.sectionKeys}}
     {{config-section
       section=this

http://git-wip-us.apache.org/repos/asf/ambari/blob/4209a492/contrib/views/slider/src/main/resources/ui/app/views/createAppWizard/step3_view.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/views/createAppWizard/step3_view.js b/contrib/views/slider/src/main/resources/ui/app/views/createAppWizard/step3_view.js
index e612c0a..5d3bd57 100644
--- a/contrib/views/slider/src/main/resources/ui/app/views/createAppWizard/step3_view.js
+++ b/contrib/views/slider/src/main/resources/ui/app/views/createAppWizard/step3_view.js
@@ -16,9 +16,10 @@
  * limitations under the License.
  */
 
-App.CreateAppWizardStep3View = Ember.View.extend({
+App.CreateAppWizardStep3View = Ember.View.extend(App.WithPanels, {
 
   didInsertElement: function () {
     this.get('controller').loadStep();
+    this.addCarets();
   }
 });