Posted to commits@ambari.apache.org by nc...@apache.org on 2017/02/24 14:19:08 UTC

[01/50] ambari git commit: Revert "AMBARI-20067. Finalize Operations stage fails when Enabling Kerberos using the manual option (rlevas)"

Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-12556 0c3478b1c -> 353a076c8


Revert "AMBARI-20067. Finalize Operations stage fails when Enabling Kerberos using the manual option (rlevas)"

This reverts commit bfaf6139c24c63fbec01c64078c8392c5fec6b27.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/235f5264
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/235f5264
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/235f5264

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 235f5264a723c4c289389f46e28ae9955e1a7491
Parents: 6553ffa
Author: Robert Levas <rl...@hortonworks.com>
Authored: Tue Feb 21 17:44:57 2017 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Tue Feb 21 17:44:57 2017 -0500

----------------------------------------------------------------------
 .../kerberos/FinalizeKerberosServerAction.java  |  14 +-
 .../kerberos/KerberosServerAction.java          |   2 +-
 .../FinalizeKerberosServerActionTest.java       | 207 -------------------
 3 files changed, 7 insertions(+), 216 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/235f5264/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
index 2742390..9f443b9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
@@ -200,14 +200,12 @@ public class FinalizeKerberosServerAction extends KerberosServerAction {
       }
     }
 
-    if(getKDCType(getCommandParameters()) != KDCType.NONE) {
-      // Ensure the keytab files for the Ambari identities have the correct permissions
-      // This is important in the event a secure cluster was created via Blueprints since some
-      // user accounts and group may not have been created when the keytab files were created.
-      requestSharedDataContext.put(this.getClass().getName() + "_visited", new HashSet<String>());
-      processIdentities(requestSharedDataContext);
-      requestSharedDataContext.remove(this.getClass().getName() + "_visited");
-    }
+    // Ensure the keytab files for the Ambari identities have the correct permissions
+    // This is important in the event a secure cluster was created via Blueprints since some
+    // user accounts and group may not have been created when the keytab files were created.
+    requestSharedDataContext.put(this.getClass().getName() + "_visited", new HashSet<String>());
+    processIdentities(requestSharedDataContext);
+    requestSharedDataContext.remove(this.getClass().getName() + "_visited");
 
     // Make sure this is a relevant directory. We don't want to accidentally allow _ANY_ directory
     // to be deleted.

http://git-wip-us.apache.org/repos/asf/ambari/blob/235f5264/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
index d404133..cab3d8d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
@@ -195,7 +195,7 @@ public abstract class KerberosServerAction extends AbstractServerAction {
     String kdcType = getCommandParameterValue(commandParameters, KDC_TYPE);
 
     return ((kdcType == null) || kdcType.isEmpty())
-        ? KDCType.NONE
+        ? KDCType.MIT_KDC
         : KDCType.translate(kdcType);
   }
 

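[Editor's aside, not part of the commit] For readers following the revert without the original AMBARI-20067 patch at hand: the first hunk drops the KDCType.NONE guard around processIdentities(), and the second restores MIT_KDC as the fallback when no kdc_type command parameter is supplied (the manual Kerberos option). A minimal standalone sketch of that fallback, using hypothetical names rather than the repository's classes:

    // Illustration only, not code from the Ambari repository. KDCTypeSketch and
    // resolve(...) stand in for KDCType and KerberosServerAction.getKDCType(...).
    public class KdcTypeFallbackSketch {
      enum KDCTypeSketch { NONE, MIT_KDC }

      static KDCTypeSketch resolve(String kdcType) {
        return (kdcType == null || kdcType.isEmpty())
            ? KDCTypeSketch.MIT_KDC            // post-revert default (the reverted patch used NONE)
            : KDCTypeSketch.valueOf(kdcType);  // stand-in for KDCType.translate(kdcType)
      }

      public static void main(String[] args) {
        System.out.println(resolve(null));       // MIT_KDC -> finalize step processes identities
        System.out.println(resolve("MIT_KDC"));  // MIT_KDC
      }
    }
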
http://git-wip-us.apache.org/repos/asf/ambari/blob/235f5264/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerActionTest.java
deleted file mode 100644
index cec482e..0000000
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerActionTest.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.serveraction.kerberos;
-
-import junit.framework.Assert;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.Role;
-import org.apache.ambari.server.RoleCommand;
-import org.apache.ambari.server.actionmanager.HostRoleCommand;
-import org.apache.ambari.server.actionmanager.HostRoleStatus;
-import org.apache.ambari.server.agent.CommandReport;
-import org.apache.ambari.server.agent.ExecutionCommand;
-import org.apache.ambari.server.audit.AuditLogger;
-import org.apache.ambari.server.controller.KerberosHelper;
-import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Host;
-import org.apache.ambari.server.state.SecurityState;
-import org.apache.ambari.server.state.ServiceComponentHost;
-import org.easymock.EasyMockSupport;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-
-import static org.apache.ambari.server.serveraction.kerberos.KerberosServerAction.DATA_DIRECTORY_PREFIX;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-
-public class FinalizeKerberosServerActionTest extends EasyMockSupport {
-  @Rule
-  public TemporaryFolder folder = new TemporaryFolder();
-
-  @Test
-  public void executeMITKDCOption() throws Exception {
-    String clusterName = "c1";
-    Injector injector = setup(clusterName);
-
-    File dataDirectory = createDataDirectory();
-
-    Map<String, String> commandParams = new HashMap<>();
-    commandParams.put(KerberosServerAction.KDC_TYPE, KDCType.MIT_KDC.name());
-    commandParams.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
-
-    ExecutionCommand executionCommand = createMockExecutionCommand(clusterName, commandParams);
-    HostRoleCommand hostRoleCommand = createMockHostRoleCommand();
-
-    PrincipalKeyCredential principleKeyCredential = createMock(PrincipalKeyCredential.class);
-
-    KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
-    expect(kerberosHelper.getKDCAdministratorCredentials(clusterName)).andReturn(principleKeyCredential).anyTimes();
-
-    replayAll();
-
-    ConcurrentMap<String, Object> requestSharedDataContext = new ConcurrentHashMap<String, Object>();
-
-    FinalizeKerberosServerAction action = injector.getInstance(FinalizeKerberosServerAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
-
-    Assert.assertTrue(dataDirectory.exists());
-
-    CommandReport commandReport = action.execute(requestSharedDataContext);
-
-    assertSuccess(commandReport);
-    Assert.assertTrue(!dataDirectory.exists());
-
-    verifyAll();
-  }
-
-  @Test
-  public void executeManualOption() throws Exception {
-    String clusterName = "c1";
-    Injector injector = setup(clusterName);
-
-    File dataDirectory = createDataDirectory();
-
-    Map<String, String> commandParams = new HashMap<>();
-    commandParams.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
-
-    ExecutionCommand executionCommand = createMockExecutionCommand(clusterName, commandParams);
-    HostRoleCommand hostRoleCommand = createMockHostRoleCommand();
-
-    replayAll();
-
-    ConcurrentMap<String, Object> requestSharedDataContext = new ConcurrentHashMap<String, Object>();
-
-    FinalizeKerberosServerAction action = injector.getInstance(FinalizeKerberosServerAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
-
-    Assert.assertTrue(dataDirectory.exists());
-
-    CommandReport commandReport = action.execute(requestSharedDataContext);
-
-    assertSuccess(commandReport);
-
-    Assert.assertTrue(!dataDirectory.exists());
-
-    verifyAll();
-  }
-
-  private File createDataDirectory() throws IOException {
-    File directory = folder.newFolder();
-    File dataDirectory = new File(directory, DATA_DIRECTORY_PREFIX + "_test");
-
-    Assert.assertTrue(dataDirectory.mkdir());
-
-    return dataDirectory;
-  }
-
-  private void assertSuccess(CommandReport commandReport) {
-    Assert.assertEquals(0, commandReport.getExitCode());
-    Assert.assertEquals(HostRoleStatus.COMPLETED.name(), commandReport.getStatus());
-    Assert.assertEquals("{}", commandReport.getStructuredOut());
-  }
-
-  private ExecutionCommand createMockExecutionCommand(String clusterName, Map<String, String> commandParams) {
-    ExecutionCommand executionCommand = createMock(ExecutionCommand.class);
-    expect(executionCommand.getClusterName()).andReturn(clusterName).anyTimes();
-    expect(executionCommand.getCommandParams()).andReturn(commandParams).anyTimes();
-    expect(executionCommand.getRoleCommand()).andReturn(RoleCommand.EXECUTE).anyTimes();
-    expect(executionCommand.getRole()).andReturn(Role.AMBARI_SERVER_ACTION.name()).anyTimes();
-    expect(executionCommand.getConfigurationTags()).andReturn(Collections.<String, Map<String, String>>emptyMap()).anyTimes();
-    expect(executionCommand.getServiceName()).andReturn("AMBARI_SERVER").anyTimes();
-    expect(executionCommand.getTaskId()).andReturn(3L).anyTimes();
-
-    return executionCommand;
-  }
-
-  private HostRoleCommand createMockHostRoleCommand() {
-    HostRoleCommand hostRoleCommand = createMock(HostRoleCommand.class);
-
-    expect(hostRoleCommand.getRequestId()).andReturn(1L).anyTimes();
-    expect(hostRoleCommand.getStageId()).andReturn(2L).anyTimes();
-    expect(hostRoleCommand.getTaskId()).andReturn(3L).anyTimes();
-
-    return hostRoleCommand;
-  }
-
-  private Injector setup(String clusterName) throws AmbariException {
-    final Map<String, Host> clusterHostMap = new HashMap<>();
-    clusterHostMap.put("host1", createMock(Host.class));
-
-    final ServiceComponentHost serviceComponentHost = createMock(ServiceComponentHost.class);
-    expect(serviceComponentHost.getSecurityState()).andReturn(SecurityState.SECURING).anyTimes();
-    expect(serviceComponentHost.getServiceName()).andReturn("SERVICE1").anyTimes();
-    expect(serviceComponentHost.getServiceComponentName()).andReturn("COMPONENT1A").anyTimes();
-    expect(serviceComponentHost.getHostName()).andReturn("host1").anyTimes();
-    expect(serviceComponentHost.getDesiredSecurityState()).andReturn(SecurityState.SECURED_KERBEROS).anyTimes();
-    serviceComponentHost.setSecurityState(SecurityState.SECURED_KERBEROS);
-    expectLastCall().once();
-
-    final List<ServiceComponentHost> serviceComponentHosts = new ArrayList<>();
-    serviceComponentHosts.add(serviceComponentHost);
-
-    final Cluster cluster = createMock(Cluster.class);
-    expect(cluster.getClusterName()).andReturn(clusterName).anyTimes();
-    expect(cluster.getServiceComponentHosts("host1")).andReturn(serviceComponentHosts).anyTimes();
-
-    final Clusters clusters = createMock(Clusters.class);
-    expect(clusters.getHostsForCluster(clusterName)).andReturn(clusterHostMap).anyTimes();
-    expect(clusters.getCluster(clusterName)).andReturn(cluster).anyTimes();
-
-    return Guice.createInjector(new AbstractModule() {
-
-      @Override
-      protected void configure() {
-        bind(KerberosHelper.class).toInstance(createMock(KerberosHelper.class));
-        bind(Clusters.class).toInstance(clusters);
-        bind(AuditLogger.class).toInstance(createNiceMock(AuditLogger.class));
-      }
-    });
-  }
-
-}
\ No newline at end of file


[09/50] ambari git commit: AMBARI-20086. Hive View 2.0: Code Refactoring. Removal of the database cache. (dipayanb)

Posted by nc...@apache.org.
AMBARI-20086. Hive View 2.0: Code Refactoring. Removal of the database cache. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3e76e47c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3e76e47c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3e76e47c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 3e76e47c5430704723801819e7f5b59759242e61
Parents: bc4b8bc
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Wed Feb 22 12:35:57 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Wed Feb 22 12:36:46 2017 +0530

----------------------------------------------------------------------
 .../ambari/view/hive20/ConnectionSystem.java    |  27 --
 .../hive20/actor/DatabaseChangeNotifier.java    | 168 ----------
 .../view/hive20/actor/DatabaseManager.java      | 313 -------------------
 .../ambari/view/hive20/actor/JdbcConnector.java |   4 -
 .../ambari/view/hive20/actor/LogAggregator.java |   4 +-
 .../view/hive20/actor/MetaDataManager.java      | 137 --------
 .../view/hive20/actor/MetaDataRetriever.java    | 173 ----------
 .../view/hive20/actor/TableChangeNotifier.java  |  95 ------
 .../ambari/view/hive20/actor/message/Ping.java  |  50 ---
 .../ambari/view/hive20/client/DDLDelegator.java |   7 +-
 .../view/hive20/client/DDLDelegatorImpl.java    |  52 ++-
 .../view/hive20/internal/dto/TableInfo.java     |  12 +-
 .../view/hive20/internal/dto/TableResponse.java |   9 -
 .../view/hive20/resources/browser/DDLProxy.java |  67 ++--
 .../resources/browser/HiveBrowserService.java   | 259 ---------------
 .../hive20/resources/system/SystemService.java  |  28 +-
 .../utils/MetaDataManagerEventSubmitter.java    |  43 ---
 .../src/main/resources/ui/app/adapters/ping.js  |  35 ---
 .../main/resources/ui/app/adapters/ranger.js    |  27 ++
 .../src/main/resources/ui/app/models/table.js   |   9 +-
 .../main/resources/ui/app/routes/application.js |   5 -
 .../databases/database/tables/table/auth.js     |   2 +-
 .../resources/ui/app/serializers/database.js    |  25 ++
 .../main/resources/ui/app/serializers/table.js  |  22 ++
 .../resources/ui/app/services/keep-alive.js     |  31 --
 .../views/hive20/src/main/resources/view.xml    |   5 -
 26 files changed, 152 insertions(+), 1457 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
index a6c7334..d7fbf41 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
@@ -28,7 +28,6 @@ import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive20.actor.DeathWatch;
-import org.apache.ambari.view.hive20.actor.MetaDataManager;
 import org.apache.ambari.view.hive20.actor.OperationController;
 import org.apache.ambari.view.hive20.internal.ConnectionSupplier;
 import org.apache.ambari.view.hive20.internal.DataStorageSupplier;
@@ -47,7 +46,6 @@ public class ConnectionSystem {
   private static volatile ConnectionSystem instance = null;
   private static final Object lock = new Object();
   private static Map<String, Map<String, ActorRef>> operationControllerMap = new ConcurrentHashMap<>();
-  private final Map<String, ActorRef> metaDataManagerMap = new ConcurrentHashMap<>();
 
   // credentials map stores usernames and passwords
   private static Map<String, String> credentialsMap = new ConcurrentHashMap<>();
@@ -107,31 +105,6 @@ public class ConnectionSystem {
     return ref;
   }
 
-  /**
-   * Returns one MetaDataManager actor per view instance
-   * @param context - View context
-   * @return MetaDataManager actor
-   */
-  public synchronized ActorRef getMetaDataManager(ViewContext context) {
-    SafeViewContext safeViewContext = new SafeViewContext(context);
-    String instanceName = safeViewContext.getInstanceName();
-    ActorRef metaDataManager = metaDataManagerMap.get(instanceName);
-    if(metaDataManager == null) {
-      metaDataManager = createMetaDataManager(safeViewContext);
-      metaDataManagerMap.put(instanceName, metaDataManager);
-    }
-
-    return metaDataManager;
-  }
-
-  public synchronized Optional<ActorRef> getMetaDataManagerIfPresent(String instanceName) {
-    return Optional.fromNullable(metaDataManagerMap.get(instanceName));
-  }
-
-  private ActorRef createMetaDataManager(SafeViewContext safeViewContext) {
-    return actorSystem.actorOf(MetaDataManager.props(safeViewContext));
-  }
-
   public synchronized void persistCredentials(String user,String password){
     if(!Strings.isNullOrEmpty(password)){
       credentialsMap.put(user,password);

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java
deleted file mode 100644
index 37f24d2..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.ActorRef;
-import akka.actor.PoisonPill;
-import akka.actor.Props;
-import com.google.common.collect.Sets;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.internal.dto.TableInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-/**
- *
- */
-public class DatabaseChangeNotifier extends HiveActor {
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  private String currentDatabaseName;
-  private Map<String, TableWrapper> tables = new HashMap<>();
-  private Map<String, TableInfo> newTables = new HashMap<>();
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-    Object message = hiveMessage.getMessage();
-    if(message instanceof DatabaseAdded) {
-      handleDatabaseAdded((DatabaseAdded) message);
-    } else if ( message instanceof DatabaseRemoved) {
-      handleDatabaseRemoved((DatabaseRemoved) message);
-    } else if (message instanceof TableUpdated) {
-      handleTableUpdated((TableUpdated) message);
-    } else if (message instanceof AllTablesUpdated) {
-      handleAllTableUpdated((AllTablesUpdated) message);
-    }
-  }
-
-  private void handleDatabaseAdded(DatabaseAdded message) {
-    LOG.info("Database Added: {}", message.name);
-    currentDatabaseName = message.name;
-    // TODO: Send event to eventbus
-  }
-
-  private void handleDatabaseRemoved(DatabaseRemoved message) {
-    LOG.info("Database Removed: {}", message.name);
-    // TODO: Send event to eventbus
-  }
-
-  private void handleTableUpdated(TableUpdated message) {
-    LOG.info("XXXXX: table xxxx. Size: {}", newTables.size());
-    newTables.put(message.info.getName(), message.info);
-  }
-
-  private void handleAllTableUpdated(AllTablesUpdated message) {
-    Set<String> oldTableNames = new HashSet<>(tables.keySet());
-    Set<String> newTableNames = new HashSet<>(newTables.keySet());
-
-    Set<String> tablesAdded = Sets.difference(newTableNames, oldTableNames);
-    Set<String> tablesRemoved = Sets.difference(oldTableNames, newTableNames);
-    Set<String> tablesUpdated = Sets.intersection(oldTableNames, newTableNames);
-
-    updateTablesAdded(tablesAdded);
-    updateTablesRemoved(tablesRemoved);
-    updateTablesUpdated(tablesUpdated);
-    newTables.clear();
-  }
-
-  private void updateTablesAdded(Set<String> tablesAdded) {
-    for (String tableName: tablesAdded) {
-      TableWrapper wrapper = new TableWrapper(tableName);
-      tables.put(tableName, wrapper);
-      wrapper.getTableNotifier().tell(new TableChangeNotifier.TableAdded(newTables.get(tableName)), getSelf());
-    }
-  }
-
-  private void updateTablesRemoved(Set<String> tablesRemoved) {
-    for(String tableName: tablesRemoved) {
-      TableWrapper tableWrapper = tables.remove(tableName);
-      tableWrapper.getTableNotifier().tell(new TableChangeNotifier.TableRemoved(tableName), getSelf());
-      tableWrapper.getTableNotifier().tell(PoisonPill.getInstance(), getSelf());
-    }
-  }
-
-  private void updateTablesUpdated(Set<String> tablesUpdated) {
-    for(String tableName: tablesUpdated) {
-      TableWrapper tableWrapper = tables.get(tableName);
-      // TODO: Check what needs to be done here.
-    }
-  }
-
-  public static Props props() {
-    return Props.create(DatabaseChangeNotifier.class);
-  }
-
-  public class TableWrapper {
-    private final String tableName;
-    private final ActorRef tableNotifier;
-
-    private TableWrapper(String tableName) {
-      this.tableName = tableName;
-      this.tableNotifier = getContext().actorOf(TableChangeNotifier.props());
-    }
-
-    public String getTableName() {
-      return tableName;
-    }
-
-    public ActorRef getTableNotifier() {
-      return tableNotifier;
-    }
-  }
-
-  public static class DatabaseAdded {
-    private final String name;
-
-    public DatabaseAdded(String name) {
-      this.name = name;
-    }
-  }
-
-
-  public static class DatabaseRemoved {
-    private final String name;
-
-    public DatabaseRemoved(String name) {
-      this.name = name;
-    }
-  }
-
-  public static class TableUpdated {
-    private final TableInfo info;
-
-    public TableUpdated(TableInfo info) {
-      this.info = info;
-    }
-  }
-
-  public static class AllTablesUpdated {
-    private final String database;
-
-    public AllTablesUpdated(String database) {
-      this.database = database;
-    }
-  }
-
-
-}

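[Editor's aside, not part of the commit] The removed DatabaseChangeNotifier reconciled old and new table sets with Guava set algebra (Sets.difference / Sets.intersection). A minimal self-contained sketch of that pattern, assuming Guava is on the classpath; the table names are illustrative:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    import com.google.common.collect.Sets;

    public class TableDiffSketch {
      public static void main(String[] args) {
        Set<String> oldTableNames = new HashSet<>(Arrays.asList("orders", "users"));
        Set<String> newTableNames = new HashSet<>(Arrays.asList("users", "events"));

        Set<String> tablesAdded   = Sets.difference(newTableNames, oldTableNames);   // [events]
        Set<String> tablesRemoved = Sets.difference(oldTableNames, newTableNames);   // [orders]
        Set<String> tablesUpdated = Sets.intersection(oldTableNames, newTableNames); // [users]

        System.out.println(tablesAdded + " " + tablesRemoved + " " + tablesUpdated);
      }
    }
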
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java
deleted file mode 100644
index bd7c6bd..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java
+++ /dev/null
@@ -1,313 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.ActorRef;
-import akka.actor.PoisonPill;
-import akka.actor.Props;
-import com.google.common.base.Function;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.Sets;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive20.AuthParams;
-import org.apache.ambari.view.hive20.ConnectionFactory;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.client.ConnectionConfig;
-import org.apache.ambari.view.hive20.internal.Connectable;
-import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
-import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
-import org.apache.ambari.view.hive20.internal.dto.TableInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.Duration;
-
-import javax.annotation.Nullable;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Manages database related state, queries Hive to get the list of databases and then manages state for each database.
- * Also, periodically updates the list of databases by calling hive.
- */
-public class DatabaseManager extends HiveActor {
-
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  private final Connectable connectable;
-
-  private final ActorRef metaDataRetriever;
-  private final String username;
-
-  private boolean refreshInProgress = false;
-  private boolean selfRefreshQueued = false;
-
-  private Map<String, DatabaseWrapper> databases = new HashMap<>();
-  private Set<String> databasesToUpdate;
-
-
-  public DatabaseManager(String username, Connectable connectable) {
-    this.username = username;
-    this.connectable = connectable;
-    metaDataRetriever = getContext().actorOf(MetaDataRetriever.props(connectable));
-  }
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-
-    Object message = hiveMessage.getMessage();
-    if (message instanceof Refresh) {
-      handleRefresh((Refresh) message);
-    } else if (message instanceof SelfRefresh) {
-      handleSelfRefresh();
-    } else if (message instanceof MetaDataRetriever.DBRefreshed) {
-      handleDBRefreshed((MetaDataRetriever.DBRefreshed) message);
-    } else if (message instanceof MetaDataRetriever.TableRefreshed) {
-      handleTableRefreshed((MetaDataRetriever.TableRefreshed) message);
-    } else if (message instanceof MetaDataRetriever.AllTableRefreshed) {
-      handleAllTableRefreshed((MetaDataRetriever.AllTableRefreshed) message);
-    } else if (message instanceof GetDatabases) {
-      handleGetDatabases((GetDatabases) message);
-    }
-
-  }
-
-  private void handleSelfRefresh() {
-    if (refreshInProgress) {
-      getContext().system().scheduler().scheduleOnce(Duration.create(500, TimeUnit.MILLISECONDS),
-          getSelf(), new SelfRefresh(), getContext().dispatcher(), getSelf());
-    } else {
-      selfRefreshQueued = false;
-      refresh(true);
-    }
-  }
-
-  private void handleRefresh(Refresh message) {
-    if (refreshInProgress && selfRefreshQueued) {
-      return; // We will not honor refresh message when a refresh is going on and another self refresh is queued in mailbox
-    } else if (refreshInProgress) {
-      selfRefreshQueued = true; // If refresh is in progress, we will queue up only one refresh message.
-      getContext().system().scheduler().scheduleOnce(Duration.create(500, TimeUnit.MILLISECONDS),
-          getSelf(), new SelfRefresh(), getContext().dispatcher(), getSelf());
-    } else {
-      refresh(message.initiateScheduler());
-    }
-  }
-
-  private void handleDBRefreshed(MetaDataRetriever.DBRefreshed message) {
-    Set<DatabaseInfo> databasesInfos = message.getDatabases();
-    Set<String> currentDatabases = new HashSet<>(databases.keySet());
-    Set<String> newDatabases = FluentIterable.from(databasesInfos).transform(new Function<DatabaseInfo, String>() {
-      @Nullable
-      @Override
-      public String apply(@Nullable DatabaseInfo databaseInfo) {
-        return databaseInfo.getName();
-      }
-    }).toSet();
-
-    databasesToUpdate = new HashSet<>(newDatabases);
-
-    Set<String> databasesAdded = Sets.difference(newDatabases, currentDatabases);
-    Set<String> databasesRemoved = Sets.difference(currentDatabases, newDatabases);
-
-    updateDatabasesAdded(databasesAdded, databasesInfos);
-    updateDatabasesRemoved(databasesRemoved);
-  }
-
-  private void updateDatabasesAdded(Set<String> databasesAdded, Set<DatabaseInfo> databasesInfos) {
-    for (DatabaseInfo info : databasesInfos) {
-      if (databasesAdded.contains(info.getName())) {
-        DatabaseWrapper wrapper = new DatabaseWrapper(info);
-        databases.put(info.getName(), wrapper);
-        wrapper.getDatabaseNotifier().tell(new DatabaseChangeNotifier.DatabaseAdded(info.getName()), getSelf());
-      }
-    }
-  }
-
-  private void updateDatabasesRemoved(Set<String> databasesRemoved) {
-    for (String database : databasesRemoved) {
-      DatabaseWrapper wrapper = databases.remove(database);
-      ActorRef notifier = wrapper.getDatabaseNotifier();
-      notifier.tell(new DatabaseChangeNotifier.DatabaseRemoved(database), getSelf());
-      notifier.tell(PoisonPill.getInstance(), getSelf());
-    }
-  }
-
-  private void handleTableRefreshed(MetaDataRetriever.TableRefreshed message) {
-    ActorRef databaseChangeNotifier = getDatabaseChangeNotifier(message.getDatabase());
-    updateTable(message.getDatabase(), message.getTable());
-    databaseChangeNotifier.tell(new DatabaseChangeNotifier.TableUpdated(message.getTable()), getSelf());
-  }
-
-  private void handleAllTableRefreshed(MetaDataRetriever.AllTableRefreshed message) {
-    ActorRef databaseChangeNotifier = getDatabaseChangeNotifier(message.getDatabase());
-    updateRemovedTables(message.getDatabase(), message.getCurrentTableNames());
-    databaseChangeNotifier.tell(new DatabaseChangeNotifier.AllTablesUpdated(message.getDatabase()), getSelf());
-    if (checkIfAllTablesOfAllDatabaseRefeshed(message)) {
-      refreshInProgress = false;
-    }
-  }
-
-  private void handleGetDatabases(GetDatabases message) {
-    if (refreshInProgress) {
-      // If currently refreshing, then schedule the same message after 500 milliseconds
-      getContext().system().scheduler().scheduleOnce(Duration.create(500, TimeUnit.MILLISECONDS),
-          getSelf(), message, getContext().dispatcher(), getSender());
-      return;
-    }
-    Set<DatabaseInfo> infos = new HashSet<>();
-    for (DatabaseWrapper wrapper : databases.values()) {
-      infos.add(wrapper.getDatabase());
-    }
-    getSender().tell(new DatabasesResult(infos), getSelf());
-  }
-
-  private boolean checkIfAllTablesOfAllDatabaseRefeshed(MetaDataRetriever.AllTableRefreshed message) {
-    databasesToUpdate.remove(message.getDatabase());
-    return databasesToUpdate.isEmpty();
-  }
-
-  private ActorRef getDatabaseChangeNotifier(String databaseName) {
-    DatabaseWrapper wrapper = databases.get(databaseName);
-    ActorRef databaseChangeNotifier = null;
-    if (wrapper != null) {
-      databaseChangeNotifier = wrapper.getDatabaseNotifier();
-    }
-    return databaseChangeNotifier;
-  }
-
-  private void refresh(boolean initiateScheduler) {
-    LOG.info("Received refresh for user");
-    refreshInProgress = true;
-    metaDataRetriever.tell(new MetaDataRetriever.RefreshDB(), getSelf());
-
-    if (initiateScheduler) {
-      scheduleRefreshAfter(1, TimeUnit.MINUTES);
-    }
-  }
-
-  private void scheduleRefreshAfter(long time, TimeUnit timeUnit) {
-    getContext().system().scheduler().scheduleOnce(Duration.create(time, timeUnit),
-        getSelf(), new Refresh(username), getContext().dispatcher(), getSelf());
-  }
-
-  @Override
-  public void postStop() throws Exception {
-    LOG.info("Database Manager stopped!!!");
-    connectable.disconnect();
-  }
-
-  private void updateTable(String databaseName, TableInfo table) {
-    DatabaseWrapper wrapper = databases.get(databaseName);
-    if (wrapper != null) {
-      DatabaseInfo info = wrapper.getDatabase();
-      info.getTables().add(table);
-    }
-  }
-
-  private void updateRemovedTables(String database, Set<String> currentTableNames) {
-    DatabaseWrapper wrapper = databases.get(database);
-    HashSet<TableInfo> notRemovedTables = new HashSet<>();
-    if (wrapper != null) {
-      DatabaseInfo info = wrapper.getDatabase();
-      for (TableInfo tableInfo : info.getTables()) {
-        if (currentTableNames.contains(tableInfo.getName())) {
-          notRemovedTables.add(tableInfo);
-        }
-      }
-      info.setTables(notRemovedTables);
-    }
-  }
-
-  public static Props props(ViewContext context) {
-    ConnectionConfig config = ConnectionFactory.create(context);
-    Connectable connectable = new HiveConnectionWrapper(config.getJdbcUrl(), config.getUsername(), config.getPassword(), new AuthParams(context));
-    return Props.create(DatabaseManager.class, config.getUsername(), connectable);
-  }
-
-  public static class Refresh {
-    private final String username;
-    private final boolean initiateScheduler;
-
-
-    public Refresh(String username) {
-      this(username, true);
-    }
-
-    public Refresh(String username, boolean initiateScheduler) {
-      this.username = username;
-      this.initiateScheduler = initiateScheduler;
-    }
-
-    public String getUsername() {
-      return username;
-    }
-
-    public boolean initiateScheduler() {
-      return initiateScheduler;
-    }
-  }
-
-  private static class SelfRefresh {
-  }
-
-  private class DatabaseWrapper {
-    private final DatabaseInfo database;
-    private final ActorRef databaseNotifier;
-
-    private DatabaseWrapper(DatabaseInfo database) {
-      this.database = database;
-      databaseNotifier = getContext().actorOf(DatabaseChangeNotifier.props());
-    }
-
-    public DatabaseInfo getDatabase() {
-      return database;
-    }
-
-    public ActorRef getDatabaseNotifier() {
-      return databaseNotifier;
-    }
-  }
-
-  public static class GetDatabases {
-    private final String username;
-
-    public GetDatabases(String username) {
-      this.username = username;
-    }
-
-    public String getUsername() {
-      return username;
-    }
-  }
-
-  public static class DatabasesResult {
-    private final Set<DatabaseInfo> databases;
-
-    public DatabasesResult(Set<DatabaseInfo> databases) {
-      this.databases = databases;
-    }
-
-    public Set<DatabaseInfo> getDatabases() {
-      return databases;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
index 1855afc..1d9744b 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
@@ -57,7 +57,6 @@ import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
 import org.apache.ambari.view.hive20.utils.HiveActorConfiguration;
-import org.apache.ambari.view.hive20.utils.MetaDataManagerEventSubmitter;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.hive.jdbc.HiveConnection;
 import org.slf4j.Logger;
@@ -290,9 +289,6 @@ public class JdbcConnector extends HiveActor {
     LOG.info("Finished processing SQL statements for Job id : {}", jobId.or("SYNC JOB"));
     if (isAsync() && jobId.isPresent()) {
       updateJobStatus(jobId.get(), Job.JOB_STATE_FINISHED);
-
-      LOG.info("Sending event to refresh meta information for user {} and instance {}", username, instanceName);
-      MetaDataManagerEventSubmitter.sendDBRefresh(username, instanceName);
     }
 
     if (resultSetOptional.isPresent()) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
index 600ea64..2c8a65d 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
@@ -72,9 +72,9 @@ public class LogAggregator extends HiveActor {
       try {
         getMoreLogs();
       } catch (SQLException e) {
-        LOG.error("SQL Error while getting logs. Tried writing to: {}", logFile);
+        LOG.warn("SQL Error while getting logs. Tried writing to: {}. Exception: {}", logFile, e.getMessage());
       } catch (HdfsApiException e) {
-        LOG.warn("HDFS Error while getting writing logs to {}", logFile);
+        LOG.warn("HDFS Error while writing logs to {}. Exception: {}", logFile, e.getMessage());
 
       }
     }

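[Editor's aside on the logging change above, not part of the commit] With SLF4J, passing the Throwable itself as the last argument preserves the stack trace, whereas formatting e.getMessage() into the pattern (as this commit does) records only the message text. A small self-contained illustration; the file path and exception are placeholders:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingAsideSketch {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingAsideSketch.class);

      public static void main(String[] args) {
        String logFile = "/tmp/example.log";                         // hypothetical path
        Exception e = new RuntimeException("simulated HDFS error");  // hypothetical failure
        LOG.warn("HDFS Error while writing logs to {}", logFile, e);                             // message + stack trace
        LOG.warn("HDFS Error while writing logs to {}. Exception: {}", logFile, e.getMessage()); // message only
      }
    }
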
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java
deleted file mode 100644
index 525ec0d..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.ActorRef;
-import akka.actor.Cancellable;
-import akka.actor.Props;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.actor.message.Ping;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.Duration;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Manages the Meta Information for Hive Server. Singleton actor which stores several DatabaseManagerActor in memory for
- * each user and instance name combination.
- */
-public class MetaDataManager extends HiveActor {
-
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  /**
-   * Stores the sub database manager actors per user combination
-   */
-  private final Map<String, ActorRef> databaseManagers = new HashMap<>();
-  private final Map<String, Cancellable> terminationSchedulers = new HashMap<>();
-  private final ViewContext context;
-
-  public MetaDataManager(ViewContext context) {
-    this.context = context;
-  }
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-
-    Object message = hiveMessage.getMessage();
-    if (message instanceof Ping) {
-      handlePing((Ping) message);
-    } else if (message instanceof Terminate) {
-      handleTerminate((Terminate) message);
-    } else if (message instanceof DatabaseManager.GetDatabases) {
-      handleGetDatabases((DatabaseManager.GetDatabases) message);
-    }
-  }
-
-  private void handlePing(Ping message) {
-    LOG.info("Ping message received for user: {}, instance: {}", message.getUsername(), message.getInstanceName());
-    ActorRef databaseManager = databaseManagers.get(message.getUsername());
-    if (databaseManager == null) {
-      databaseManager = createDatabaseManager(message.getUsername(), message.getInstanceName());
-      databaseManagers.put(message.getUsername(), databaseManager);
-      databaseManager.tell(new DatabaseManager.Refresh(message.getUsername()), getSelf());
-    } else {
-      if(message.isImmediate()) {
-        databaseManager.tell(new DatabaseManager.Refresh(message.getUsername(), false), getSelf());
-      }
-      cancelTerminationScheduler(message.getUsername());
-    }
-    scheduleTermination(message.getUsername());
-  }
-
-  private void handleTerminate(Terminate message) {
-    ActorRef databaseManager = databaseManagers.remove(message.username);
-    getContext().stop(databaseManager);
-    cancelTerminationScheduler(message.getUsername());
-  }
-
-  private void handleGetDatabases(DatabaseManager.GetDatabases message) {
-    String username = message.getUsername();
-    ActorRef databaseManager = databaseManagers.get(username);
-    if(databaseManager != null) {
-      databaseManager.tell(message, getSender());
-    } else {
-      // Not database Manager created. Start the database manager with a ping message
-      // and queue up the GetDatabases call to self
-      getSelf().tell(new Ping(username, context.getInstanceName()), getSender());
-      getSelf().tell(message, getSender());
-    }
-  }
-
-  private void cancelTerminationScheduler(String username) {
-    Cancellable cancellable = terminationSchedulers.remove(username);
-    if (!(cancellable == null || cancellable.isCancelled())) {
-      LOG.info("Cancelling termination scheduler");
-      cancellable.cancel();
-    }
-  }
-
-  private void scheduleTermination(String username) {
-    Cancellable cancellable = context().system().scheduler().scheduleOnce(Duration.create(2, TimeUnit.MINUTES),
-        getSelf(), new Terminate(username), getContext().dispatcher(), getSelf());
-    terminationSchedulers.put(username, cancellable);
-  }
-
-  private ActorRef createDatabaseManager(String username, String instanceName) {
-    LOG.info("Creating database manager for username: {}, instance: {}", username, instanceName);
-    return context().actorOf(DatabaseManager.props(context));
-  }
-
-  public static Props props(ViewContext viewContext) {
-    return Props.create(MetaDataManager.class, viewContext);
-  }
-
-  private class Terminate {
-    public final String username;
-
-    public Terminate(String username) {
-      this.username = username;
-    }
-
-    public String getUsername() {
-      return username;
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java
deleted file mode 100644
index 64cd69c..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.Props;
-import com.google.common.base.Optional;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.internal.Connectable;
-import org.apache.ambari.view.hive20.internal.ConnectionException;
-import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
-import org.apache.ambari.view.hive20.internal.dto.TableInfo;
-import org.apache.hive.jdbc.HiveConnection;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.HashSet;
-import java.util.Set;
-
-/**
- *
- */
-public class MetaDataRetriever extends HiveActor {
-
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  private final Connectable connectable;
-
-  public MetaDataRetriever(Connectable connectable) {
-    this.connectable = connectable;
-  }
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-    Object message = hiveMessage.getMessage();
-    if (message instanceof RefreshDB) {
-      handleRefreshDB();
-    }
-  }
-
-  private void handleRefreshDB() {
-    try {
-      refreshDatabaseInfos();
-    } catch (ConnectionException | SQLException e) {
-      LOG.error("Failed to update the complete database information. Exception: {}", e);
-      getSender().tell(new DBRefreshFailed(e), getSelf());
-    }
-  }
-
-  private HiveConnection getHiveConnection() throws ConnectionException {
-    if (!connectable.isOpen()) {
-      connectable.connect();
-    }
-    Optional<HiveConnection> connectionOptional = connectable.getConnection();
-    return connectionOptional.get();
-  }
-
-  private void refreshDatabaseInfos() throws ConnectionException, SQLException {
-    HiveConnection connection = getHiveConnection();
-    Set<DatabaseInfo> infos = new HashSet<>();
-    try (ResultSet schemas = connection.getMetaData().getSchemas()) {
-      while (schemas.next()) {
-        DatabaseInfo info = new DatabaseInfo(schemas.getString(1));
-        infos.add(info);
-      }
-    }
-
-    getSender().tell(new DBRefreshed(infos), getSelf());
-
-    for (DatabaseInfo info : infos) {
-      refreshTablesInfo(info.getName());
-    }
-  }
-
-  private void refreshTablesInfo(String database) throws ConnectionException, SQLException {
-    HiveConnection connection = getHiveConnection();
-    Set<String> currentTableNames = new HashSet<>();
-    try (ResultSet tables = connection.getMetaData().getTables("", database, null, null)) {
-      while (tables.next()) {
-        TableInfo info = new TableInfo(tables.getString(3), tables.getString(4));
-        currentTableNames.add(info.getName());
-        getSender().tell(new TableRefreshed(info, database), getSelf());
-      }
-    }
-    getSender().tell(new AllTableRefreshed(database, currentTableNames), getSelf());
-  }
-
-  public static  Props props(Connectable connectable) {
-    return Props.create(MetaDataRetriever.class, connectable);
-  }
-
-
-  public static class RefreshDB {
-
-  }
-
-  public static class DBRefreshed {
-    private final Set<DatabaseInfo> databases;
-
-    public DBRefreshed(Set<DatabaseInfo> databases) {
-      this.databases = databases;
-    }
-
-    public Set<DatabaseInfo> getDatabases() {
-      return databases;
-    }
-  }
-
-  public static class DBRefreshFailed {
-    private final Exception exception;
-
-    public DBRefreshFailed(Exception exception) {
-      this.exception = exception;
-    }
-
-    public Exception getException() {
-      return exception;
-    }
-  }
-
-  public static  class TableRefreshed {
-    private final TableInfo table;
-    private final String database;
-
-    public TableRefreshed(TableInfo table, String database) {
-      this.table = table;
-      this.database = database;
-    }
-
-    public TableInfo getTable() {
-      return table;
-    }
-
-    public String getDatabase() {
-      return database;
-    }
-  }
-
-  public static class AllTableRefreshed {
-    private final String database;
-    private final Set<String> currentTableNames;
-
-    public AllTableRefreshed(String database, Set<String> currentTableNames) {
-      this.database = database;
-      this.currentTableNames = currentTableNames;
-    }
-
-    public String getDatabase() {
-      return database;
-    }
-
-    public Set<String> getCurrentTableNames() {
-      return currentTableNames;
-    }
-  }
-}

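[Editor's aside, not part of the commit] The removed MetaDataRetriever enumerated databases and tables through the JDBC DatabaseMetaData API: getSchemas() for the database list, then getTables() per schema. A minimal standalone sketch of that walk; the JDBC URL and credentials are placeholders and assume a reachable HiveServer2 with the Hive JDBC driver on the classpath:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class MetadataWalkSketch {
      public static void main(String[] args) throws SQLException {
        String url = "jdbc:hive2://localhost:10000/default";  // hypothetical endpoint
        try (Connection connection = DriverManager.getConnection(url, "hive", "")) {
          try (ResultSet schemas = connection.getMetaData().getSchemas()) {
            while (schemas.next()) {
              String database = schemas.getString(1);          // TABLE_SCHEM column
              try (ResultSet tables = connection.getMetaData().getTables("", database, null, null)) {
                while (tables.next()) {
                  System.out.println(database + "." + tables.getString(3));  // TABLE_NAME column
                }
              }
            }
          }
        }
      }
    }
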
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java
deleted file mode 100644
index 0581618..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.Props;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.internal.dto.TableInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- *
- */
-public class TableChangeNotifier extends HiveActor {
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-    Object message = hiveMessage.getMessage();
-    if(message instanceof TableUpdated) {
-      handleTableUpdated((TableUpdated) message);
-    } else if(message instanceof TableAdded) {
-      handleTableAdded((TableAdded) message);
-    } else if(message instanceof TableRemoved) {
-      handleTableRemoved((TableRemoved) message);
-    }
-  }
-
-  private void handleTableUpdated(TableUpdated message) {
-    LOG.info("Tables updated for table name: {}", message.getTableInfo().getName());
-  }
-
-  private void handleTableAdded(TableAdded message) {
-    LOG.info("Tables added for table name: {}", message.getTableInfo().getName());
-  }
-
-  private void handleTableRemoved(TableRemoved message) {
-    LOG.info("Tables removed for table name: {}", message.getTableName());
-  }
-
-  public static Props props() {
-    return Props.create(TableChangeNotifier.class);
-  }
-
-
-  public static class TableAdded {
-    private final TableInfo tableInfo;
-    public TableAdded(TableInfo tableInfo) {
-      this.tableInfo = tableInfo;
-    }
-
-    public TableInfo getTableInfo() {
-      return tableInfo;
-    }
-  }
-
-  public static class TableRemoved {
-    private final String tableName;
-    public TableRemoved(String tableName) {
-      this.tableName = tableName;
-    }
-
-    public String getTableName() {
-      return tableName;
-    }
-  }
-
-
-  public static class TableUpdated {
-    private final TableInfo tableInfo;
-    public TableUpdated(TableInfo tableInfo) {
-      this.tableInfo = tableInfo;
-    }
-
-    public TableInfo getTableInfo() {
-      return tableInfo;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java
deleted file mode 100644
index 61df87a..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor.message;
-
-/**
- * Ping message
- */
-public class Ping {
-  private final String username;
-  private final String instanceName;
-  private final boolean immediate;
-
-  public Ping(String username, String instanceName) {
-    this(username, instanceName, false);
-  }
-
-  public Ping(String username, String instanceName, boolean immediate) {
-    this.username = username;
-    this.instanceName = instanceName;
-    this.immediate = immediate;
-  }
-
-  public String getUsername() {
-    return username;
-  }
-
-  public String getInstanceName() {
-    return instanceName;
-  }
-
-  public boolean isImmediate() {
-    return immediate;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
index ffa9e132..baa82b4 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
@@ -20,11 +20,14 @@ package org.apache.ambari.view.hive20.client;
 
 import java.util.List;
 
+import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
+
 public interface DDLDelegator {
 
-  List<String> getDbList(ConnectionConfig config, String like);
+  List<DatabaseInfo> getDbList(ConnectionConfig config, String like);
 
-  List<String> getTableList(ConnectionConfig config, String database, String like);
+  List<TableInfo> getTableList(ConnectionConfig config, String database, String like);
 
   List<Row> getTableDescriptionFormatted(ConnectionConfig config, String database, String table);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
index 76c7c03..ef4f100 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
@@ -18,15 +18,11 @@
 
 package org.apache.ambari.view.hive20.client;
 
-import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
-import akka.actor.Inbox;
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.base.Optional;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import javax.annotation.Nullable;
+
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive20.actor.message.Connect;
 import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
@@ -40,15 +36,23 @@ import org.apache.ambari.view.hive20.actor.message.job.NoMoreItems;
 import org.apache.ambari.view.hive20.actor.message.job.NoResult;
 import org.apache.ambari.view.hive20.actor.message.job.Result;
 import org.apache.ambari.view.hive20.actor.message.job.ResultSetHolder;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
 import org.apache.ambari.view.hive20.utils.HiveActorConfiguration;
 import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.Duration;
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import scala.concurrent.duration.Duration;
 
 public class DDLDelegatorImpl implements DDLDelegator {
 
@@ -69,15 +73,29 @@ public class DDLDelegatorImpl implements DDLDelegator {
   }
 
   @Override
-  public List<String> getDbList(ConnectionConfig config, String like) {
+  public List<DatabaseInfo> getDbList(ConnectionConfig config, String like) {
     Optional<Result> rowsFromDB = getRowsFromDB(config, getDatabaseListStatements(like));
-    return rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+    List<String> databaseNames = rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+    return FluentIterable.from(databaseNames).transform(new Function<String, DatabaseInfo>() {
+      @Nullable
+      @Override
+      public DatabaseInfo apply(@Nullable String databaseName) {
+        return new DatabaseInfo(databaseName);
+      }
+    }).toList();
   }
 
   @Override
-  public List<String> getTableList(ConnectionConfig config, String database, String like) {
+  public List<TableInfo> getTableList(ConnectionConfig config, String database, String like) {
     Optional<Result> rowsFromDB = getRowsFromDB(config, getTableListStatements(database, like));
-    return rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+    List<String> tableNames = rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+    return FluentIterable.from(tableNames).transform(new Function<String, TableInfo>() {
+      @Nullable
+      @Override
+      public TableInfo apply(@Nullable String tableName) {
+        return new TableInfo(tableName);
+      }
+    }).toList();
   }
 
   @Override
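
Note: both getDbList and getTableList use the same Guava idiom above: take the first-column
strings of the result rows and wrap each one in a DTO via FluentIterable.transform. For
comparison only (not part of the patch), the equivalent on a Java 8 toolchain would be:

    import java.util.List;
    import java.util.stream.Collectors;
    import org.apache.ambari.view.hive20.internal.dto.TableInfo;

    List<TableInfo> toTableInfos(List<String> tableNames) {
      // Same wrapping as the anonymous Function above, expressed with streams
      return tableNames.stream()
          .map(TableInfo::new)                // one-argument constructor; see the TableInfo change below
          .collect(Collectors.toList());
    }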

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
index 41be0a0..de282a7 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
@@ -26,11 +26,9 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
  */
 public class TableInfo {
   private String name;
-  private String type;
 
-  public TableInfo(String name, String type) {
+  public TableInfo(String name) {
     this.name = name;
-    this.type = type;
   }
 
   public String getName() {
@@ -41,13 +39,6 @@ public class TableInfo {
     this.name = name;
   }
 
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
 
   @Override
   public boolean equals(Object o) {
@@ -73,7 +64,6 @@ public class TableInfo {
   public String toString() {
     return "TableInfo{" +
         "name='" + name + '\'' +
-        ", type='" + type + '\'' +
         '}';
   }
 }
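
Note: after this change TableInfo carries only the table name. A quick illustration with a
made-up table name (no type field is stored or printed any more):

    TableInfo info = new TableInfo("customers");
    info.getName();      // "customers"
    info.toString();     // "TableInfo{name='customers'}"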

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
index 09e1ea9..47acc01 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
@@ -24,7 +24,6 @@ package org.apache.ambari.view.hive20.internal.dto;
 public class TableResponse {
   private String id;
   private String name;
-  private String type;
   private String databaseId;
 
   public String getId() {
@@ -43,14 +42,6 @@ public class TableResponse {
     this.name = name;
   }
 
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
-
   public String getDatabaseId() {
     return databaseId;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
index f5ecdee..e433dc4 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
@@ -18,17 +18,14 @@
 
 package org.apache.ambari.view.hive20.resources.browser;
 
-import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
-import akka.actor.Inbox;
 import com.google.common.base.Function;
 import com.google.common.base.Optional;
 import com.google.common.base.Predicate;
 import com.google.common.base.Strings;
 import com.google.common.collect.FluentIterable;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.ConnectionFactory;
 import org.apache.ambari.view.hive20.ConnectionSystem;
-import org.apache.ambari.view.hive20.actor.DatabaseManager;
 import org.apache.ambari.view.hive20.client.ConnectionConfig;
 import org.apache.ambari.view.hive20.client.DDLDelegator;
 import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
@@ -56,10 +53,8 @@ import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
-import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.Duration;
 
 import javax.annotation.Nullable;
 import javax.inject.Inject;
@@ -68,7 +63,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.TimeUnit;
 
 /**
  *
@@ -93,21 +87,33 @@ public class DDLProxy {
   }
 
   public DatabaseResponse getDatabase(final String databaseId) {
-    Optional<DatabaseInfo> infoOptional = selectDatabase(databaseId);
-    if (!infoOptional.isPresent()) {
-      // Throw exception
-    }
+    DatabaseInfo dbInfo = new DatabaseInfo(databaseId);
+    List<TableInfo> tables = getTableInfos(databaseId);
+    dbInfo.setTables(new HashSet<>(tables));
 
-    return transformToDatabaseResponse(infoOptional.get());
+    return transformToDatabaseResponse(dbInfo);
   }
 
   public Set<TableResponse> getTables(final String databaseId) {
-    Optional<DatabaseInfo> infoOptional = selectDatabase(databaseId);
-    if (!infoOptional.isPresent()) {
-      // Throw exception;
-    }
-    DatabaseInfo info = infoOptional.get();
-    return transformToTablesResponse(info.getTables(), info.getName());
+    List<TableInfo> tables = getTableInfos(databaseId);
+
+    return FluentIterable.from(tables).transform(new Function<TableInfo, TableResponse>() {
+      @Nullable
+      @Override
+      public TableResponse apply(@Nullable TableInfo tableInfo) {
+        TableResponse response = new TableResponse();
+        response.setDatabaseId(databaseId);
+        response.setId(databaseId + "/" + tableInfo.getName());
+        response.setName(tableInfo.getName());
+        return response;
+      }
+    }).toSet();
+  }
+
+  private List<TableInfo> getTableInfos(String databaseId) {
+    ConnectionConfig hiveConnectionConfig = ConnectionFactory.create(context);
+    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+    return delegator.getTableList(hiveConnectionConfig, databaseId, "*");
   }
 
   public TableResponse getTable(final String databaseName, final String tableName) {
@@ -190,7 +196,6 @@ public class DDLProxy {
     TableResponse response = new TableResponse();
     response.setId(databaseName + "/" + tableInfo.getName());
     response.setName(tableInfo.getName());
-    response.setType(tableInfo.getType());
     response.setDatabaseId(databaseName);
     return response;
   }
@@ -205,26 +210,10 @@ public class DDLProxy {
   }
 
   private Set<DatabaseInfo> getDatabaseInfos() {
-    ActorRef metaDataManager = ConnectionSystem.getInstance().getMetaDataManager(context);
-    ActorSystem system = ConnectionSystem.getInstance().getActorSystem();
-
-    Inbox inbox = Inbox.create(system);
-
-    inbox.send(metaDataManager, new DatabaseManager.GetDatabases(context.getUsername()));
-    Object receive;
-    try {
-      receive = inbox.receive(Duration.create(60 * 1000, TimeUnit.MILLISECONDS));
-    } catch (Throwable ex) {
-      String errorMessage = "Query timed out to fetch databases information for user: " + context.getUsername();
-      LOG.error(errorMessage, ex);
-      throw new ServiceFormattedException(errorMessage, ex);
-    }
-    Set<DatabaseInfo> infos = new HashSet<>();
-
-    if (receive instanceof DatabaseManager.DatabasesResult) {
-      infos = ((DatabaseManager.DatabasesResult) receive).getDatabases();
-    }
-    return infos;
+    ConnectionConfig hiveConnectionConfig = ConnectionFactory.create(context);
+    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+    List<DatabaseInfo> databases = delegator.getDbList(hiveConnectionConfig, "*");
+    return new HashSet<>(databases);
   }
 
   public String generateCreateTableDDL(String databaseName, TableMeta tableMeta) throws ServiceException {
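
Note: database and table listings in DDLProxy now go through DDLDelegator on every request
instead of asking the cached DatabaseManager actor through an Inbox with a 60-second timeout.
A rough sketch of what a caller of getTables now receives; "default", the table name, and the
ddlProxy reference are placeholders:

    import java.util.Set;
    import org.apache.ambari.view.hive20.internal.dto.TableResponse;

    Set<TableResponse> tables = ddlProxy.getTables("default");
    for (TableResponse t : tables) {
      // e.g. getId() = "default/customers", getDatabaseId() = "default",
      // ids are composed as "<databaseId>/<tableName>" by the transform above
      System.out.println(t.getId() + " in " + t.getDatabaseId());
    }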

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
deleted file mode 100644
index 274ea20..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.resources.browser;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.ViewResourceHandler;
-import org.apache.ambari.view.hive20.BaseService;
-import org.apache.ambari.view.hive20.ConnectionSystem;
-import org.apache.ambari.view.hive20.client.ColumnDescription;
-import org.apache.ambari.view.hive20.client.ConnectionConfig;
-import org.apache.ambari.view.hive20.client.Cursor;
-import org.apache.ambari.view.hive20.client.DDLDelegator;
-import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
-import org.apache.ambari.view.hive20.client.Row;
-import org.apache.ambari.view.hive20.resources.jobs.ResultsPaginationController;
-import org.apache.ambari.view.hive20.utils.BadRequestFormattedException;
-import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.inject.Inject;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.util.List;
-import java.util.concurrent.Callable;
-
-/**
- * Database access resource
- */
-public class HiveBrowserService extends BaseService {
-  @Inject
-  ViewResourceHandler handler;
-  @Inject
-  protected ViewContext context;
-
-  protected final static Logger LOG =
-    LoggerFactory.getLogger(HiveBrowserService.class);
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response databases(@QueryParam("like") String like,
-                            @QueryParam("first") String fromBeginning,
-                            @QueryParam("count") Integer count,
-                            @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    JSONObject response = new JSONObject();
-    ConnectionConfig hiveConnectionConfig = getHiveConnectionConfig();
-    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-    List<String> databases = delegator.getDbList(hiveConnectionConfig, like);
-    response.put("databases", databases);
-
-    return Response.ok(response).build();
-
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database.page")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response databasesPaginated(@QueryParam("like") String like,
-                                     @QueryParam("first") String fromBeginning,
-                                     @QueryParam("count") Integer count,
-                                     @QueryParam("searchId") String searchId,
-                                     @QueryParam("format") String format,
-                                     @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    String curl = null;
-    try {
-      final String finalLike = like;
-      final DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-      return ResultsPaginationController.getInstance(context)
-          .request("databases", searchId, false, fromBeginning, count, format, requestedColumns,
-            new Callable<Cursor<Row, ColumnDescription>>() {
-              @Override
-              public Cursor<Row, ColumnDescription> call() throws Exception {
-                return delegator.getDbListCursor(getHiveConnectionConfig(), finalLike);
-              }
-            }).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response tablesInDatabase(@PathParam("db") String db,
-                                   @QueryParam("like") String like,
-                                   @QueryParam("first") String fromBeginning,
-                                   @QueryParam("count") Integer count,
-                                   @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-
-    JSONObject response = new JSONObject();
-    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-    List<String> tables = delegator.getTableList(getHiveConnectionConfig(), db, like);
-    response.put("tables", tables);
-    response.put("database", db);
-    return Response.ok(response).build();
-
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table.page")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response tablesInDatabasePaginated(@PathParam("db") final String db,
-                                            @QueryParam("like") String like,
-                                            @QueryParam("first") String fromBeginning,
-                                            @QueryParam("count") Integer count,
-                                            @QueryParam("searchId") String searchId,
-                                            @QueryParam("format") String format,
-                                            @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    String curl = null;
-    try {
-      final String finalLike = like;
-      final DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-      try {
-        return ResultsPaginationController.getInstance(context)
-          .request(db + ":tables:", searchId, false, fromBeginning, count, format, requestedColumns,
-            new Callable<Cursor<Row, ColumnDescription>>() {
-              @Override
-              public Cursor<Row, ColumnDescription> call() throws Exception {
-                return delegator.getTableListCursor(getHiveConnectionConfig(), db, finalLike);
-              }
-            }).build();
-      } catch (Exception ex) {
-        throw new ServiceFormattedException(ex.getMessage(), ex);
-      }
-
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table/{table}")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response describeTable(@PathParam("db") String db,
-                                @PathParam("table") String table,
-                                @QueryParam("like") String like,
-                                @QueryParam("columns") String requestedColumns,
-                                @QueryParam("extended") String extended) {
-    boolean extendedTableDescription = (extended != null && extended.equals("true"));
-    String curl = null;
-    try {
-      JSONObject response = new JSONObject();
-      DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-      List<ColumnDescription> descriptions = delegator.getTableDescription(getHiveConnectionConfig(), db, table, "%", extendedTableDescription);
-      response.put("columns", descriptions);
-      response.put("database", db);
-      response.put("table", table);
-
-      //TODO: New implementation
-
-      return Response.ok(response).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table/{table}.page")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response describeTablePaginated(@PathParam("db") final String db,
-                                         @PathParam("table") final String table,
-                                         @QueryParam("like") String like,
-                                         @QueryParam("first") String fromBeginning,
-                                         @QueryParam("searchId") String searchId,
-                                         @QueryParam("count") Integer count,
-                                         @QueryParam("format") String format,
-                                         @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = ".*";
-    else
-      like = ".*" + like + ".*";
-    final String finalLike = like;
-
-    final DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-    try {
-      return ResultsPaginationController.getInstance(context)
-        .request(db + ":tables:" + table + ":columns", searchId, false, fromBeginning, count, format, requestedColumns,
-          new Callable<Cursor<Row, ColumnDescription>>() {
-            @Override
-            public Cursor<Row, ColumnDescription> call() throws Exception {
-              return delegator.getTableDescriptionCursor(getHiveConnectionConfig(), db, table, finalLike, false);
-            }
-          }).build();
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
index 1399ee4..dd5bb06 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
@@ -18,20 +18,16 @@
 
 package org.apache.ambari.view.hive20.resources.system;
 
-import akka.actor.ActorRef;
-import org.apache.ambari.view.hive20.BaseService;
-import org.apache.ambari.view.hive20.ConnectionSystem;
-import org.apache.ambari.view.hive20.actor.message.Ping;
-import org.apache.ambari.view.hive20.resources.system.ranger.RangerService;
-import org.json.simple.JSONObject;
-
+import java.util.List;
 import javax.inject.Inject;
 import javax.ws.rs.GET;
-import javax.ws.rs.POST;
 import javax.ws.rs.Path;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Response;
-import java.util.List;
+
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.resources.system.ranger.RangerService;
+import org.json.simple.JSONObject;
 
 /**
  * System services which are required for the working of the application
@@ -45,20 +41,6 @@ public class SystemService extends BaseService {
     this.rangerService = rangerService;
   }
 
-  /**
-   * Clients should sent pings to the server at regular interval so that the system could keep alive stuffs or do
-   * cleanup work when the pings stops
-   * @return No content
-   */
-  @POST
-  @Path("ping")
-  public Response ping() {
-    //TODO: Change this to EventBus implementation
-    ActorRef metaDataManager = ConnectionSystem.getInstance().getMetaDataManager(context);
-    metaDataManager.tell(new Ping(context.getUsername(), context.getInstanceName()), ActorRef.noSender());
-    return Response.ok().status(Response.Status.NO_CONTENT).build();
-  }
-
 
   /**
    * Returns if the current user is a cluster operator or ambari administrator

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MetaDataManagerEventSubmitter.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MetaDataManagerEventSubmitter.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MetaDataManagerEventSubmitter.java
deleted file mode 100644
index b23e06e..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MetaDataManagerEventSubmitter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.utils;
-
-import akka.actor.ActorRef;
-import com.google.common.base.Optional;
-import org.apache.ambari.view.hive20.ConnectionSystem;
-import org.apache.ambari.view.hive20.actor.message.Ping;
-
-/**
- * Static class to submit event to the MetaData Manager.
- */
-public final class MetaDataManagerEventSubmitter {
-
-  /**
-   * Send a ping message to the MetaDataManager Actor for that instance
-   * @param username Logged-in username
-   * @param instanceName current instance name
-   */
-  public static void sendDBRefresh(String username, String instanceName) {
-    Optional<ActorRef> metaDataManagerOptional = ConnectionSystem.getInstance().getMetaDataManagerIfPresent(instanceName);
-    if(metaDataManagerOptional.isPresent()) {
-      ActorRef metaDataManager = metaDataManagerOptional.get();
-      metaDataManager.tell(new Ping(username, instanceName, true), ActorRef.noSender());
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js
deleted file mode 100644
index f88cfed..0000000
--- a/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import ApplicationAdapter from './application';
-
-export default ApplicationAdapter.extend({
-  ping() {
-    const url = this.urlForCreateRecord('ping');
-    return this.ajax(url, 'POST');
-  },
-
-  pathForType() {
-    return "system/ping";
-  },
-
-  fetchAuth(databaseName, tableName) {
-    const url = this.buildURL() + '/system/ranger/auth';
-    return this.ajax(url, "GET", {data: {database: databaseName, table: tableName}});
-  }
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/adapters/ranger.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/ranger.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/ranger.js
new file mode 100644
index 0000000..92b6472
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/ranger.js
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import ApplicationAdapter from './application';
+
+export default ApplicationAdapter.extend({
+
+  fetchAuth(databaseName, tableName) {
+    const url = this.buildURL() + '/system/ranger/auth';
+    return this.ajax(url, "GET", {data: {database: databaseName, table: tableName}});
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/models/table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/table.js b/contrib/views/hive20/src/main/resources/ui/app/models/table.js
index 3fdd21a..90400fe 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/table.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/table.js
@@ -20,14 +20,7 @@ import DS from 'ember-data';
 
 export default DS.Model.extend({
   name: DS.attr('string'),
-  type: DS.attr('string'),
   database: DS.belongsTo('database'),
   selected: false,
-  icon: Ember.computed('type', function() {
-    if(this.get('type').toLowerCase() === 'view') {
-      return "eye";
-    } else {
-      return "table";
-    }
-  })
+  icon: "table"
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/application.js b/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
index f4ceeb9..448fad2 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
@@ -21,12 +21,7 @@ import tabs from '../configs/top-level-tabs';
 import ENV from 'ui/config/environment';
 
 export default Ember.Route.extend({
-  keepAlive: Ember.inject.service('keep-alive'),
   serviceCheck: Ember.inject.service(),
-  init: function () {
-    this._super(...arguments);
-    this.get('keepAlive').initialize();
-  },
 
   beforeModel() {
     if (ENV.APP.SHOULD_PERFORM_SERVICE_CHECK && !this.get('serviceCheck.checkCompleted')) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js
index ec9d1a2..41b16be 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js
@@ -22,6 +22,6 @@ export default TableMetaRouter.extend({
   model(params, transition) {
     let databaseName = transition.params['databases.database']['databaseId'];
     let tableName = transition.params['databases.database.tables.table']['name'];
-    return this.store.adapterFor('ping').fetchAuth(databaseName, tableName);
+    return this.store.adapterFor('ranger').fetchAuth(databaseName, tableName);
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/serializers/database.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/serializers/database.js b/contrib/views/hive20/src/main/resources/ui/app/serializers/database.js
new file mode 100644
index 0000000..99a5ef3
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/serializers/database.js
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.RESTSerializer.extend(DS.EmbeddedRecordsMixin, {
+  attrs: {
+    tables: {embedded: 'always'}
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/serializers/table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/serializers/table.js b/contrib/views/hive20/src/main/resources/ui/app/serializers/table.js
new file mode 100644
index 0000000..0b1aecd
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/serializers/table.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.RESTSerializer.extend(DS.EmbeddedRecordsMixin, {
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js b/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js
deleted file mode 100644
index 6bb12fb..0000000
--- a/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import Ember from 'ember';
-
-export default Ember.Service.extend({
-  store: Ember.inject.service(),
-  initialize: function() {
-    this.schedulePing();
-  },
-
-  schedulePing() {
-    this.get('store').adapterFor('ping').ping();
-    Ember.run.later(this.schedulePing.bind(this), 60000);
-  }
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/view.xml b/contrib/views/hive20/src/main/resources/view.xml
index 0dd3875..37ddf56 100644
--- a/contrib/views/hive20/src/main/resources/view.xml
+++ b/contrib/views/hive20/src/main/resources/view.xml
@@ -328,11 +328,6 @@
         <service-class>org.apache.ambari.view.hive20.resources.files.FileService</service-class>
     </resource>
 
-    <!--<resource>
-        <name>ddl</name>
-        <service-class>org.apache.ambari.view.hive2.resources.browser.HiveBrowserService</service-class>
-    </resource>-->
-
     <resource>
         <name>hive</name>
         <service-class>org.apache.ambari.view.hive20.HelpService</service-class>


[28/50] ambari git commit: AMBARI-20125 - DataNode Storage alert is duplicated (rzang)

Posted by nc...@apache.org.
AMBARI-20125 - DataNode Storage alert is duplicated (rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f936bcfd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f936bcfd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f936bcfd

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: f936bcfdb3cc6451bd1fa82e0d6e98f2aec285df
Parents: e9c9439
Author: Richard Zang <rz...@apache.org>
Authored: Wed Feb 22 18:20:53 2017 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Wed Feb 22 18:20:53 2017 -0800

----------------------------------------------------------------------
 ambari-web/app/controllers/main/service/info/summary.js | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f936bcfd/ambari-web/app/controllers/main/service/info/summary.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/summary.js b/ambari-web/app/controllers/main/service/info/summary.js
index 0ca9775..183d5df 100644
--- a/ambari-web/app/controllers/main/service/info/summary.js
+++ b/ambari-web/app/controllers/main/service/info/summary.js
@@ -392,17 +392,21 @@ App.MainServiceInfoSummaryController = Em.Controller.extend(App.WidgetSectionMix
             "isOK": [],
             "isUnknown": []
           };
-
+          var others = [];
           serviceDefinitions.forEach(function (definition) {
             definition.set('isCollapsed', true);
+            var pushed = false; // make sure each definition gets pushed only one time
             Object.keys(definitionTypes).forEach(function (type) {
-              if (definition.get(type)) {
+              if (!pushed && definition.get(type)) {
                 definitionTypes[type].push(definition);
-                serviceDefinitions = serviceDefinitions.without(definition);
+                pushed = true;
               }
             });
+            if (!pushed) {
+              others.push(definition);
+            }
           });
-          serviceDefinitions = definitionTypes.isCritical.concat(definitionTypes.isWarning, definitionTypes.isOK, definitionTypes.isUnknown, serviceDefinitions);
+          serviceDefinitions = definitionTypes.isCritical.concat(definitionTypes.isWarning, definitionTypes.isOK, definitionTypes.isUnknown, others);
 
           return serviceDefinitions;
         }.property('controller.content'),
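
Note: the fix makes sure each definition is pushed into at most one severity bucket (first
match wins) and collects unmatched ones in "others"; previously a definition matching more
than one flag could be pushed, and therefore rendered, more than once after the final concat.
The same bucketing idea, translated into a short Java sketch for reference only (Definition
and matches() are illustrative stand-ins for the Ember objects):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    List<Definition> orderBySeverity(List<Definition> definitions) {
      // Buckets keep their insertion order: critical, warning, ok, unknown
      Map<String, List<Definition>> buckets = new LinkedHashMap<>();
      for (String type : Arrays.asList("isCritical", "isWarning", "isOK", "isUnknown")) {
        buckets.put(type, new ArrayList<Definition>());
      }
      List<Definition> others = new ArrayList<>();
      for (Definition d : definitions) {
        boolean pushed = false;                      // each definition lands in one bucket at most
        for (Map.Entry<String, List<Definition>> e : buckets.entrySet()) {
          if (!pushed && d.matches(e.getKey())) {    // stands in for definition.get(type)
            e.getValue().add(d);
            pushed = true;
          }
        }
        if (!pushed) {
          others.add(d);
        }
      }
      List<Definition> ordered = new ArrayList<>();
      for (List<Definition> bucket : buckets.values()) {
        ordered.addAll(bucket);
      }
      ordered.addAll(others);                        // unmatched definitions keep their order at the end
      return ordered;
    }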


[43/50] ambari git commit: Revert "AMBARI-20050. Issue while importing workflow with insufficient permissions.(Madhan Mohan Reddy via gauravn7)"

Posted by nc...@apache.org.
Revert "AMBARI-20050. Issue while importing workflow with insufficient permissions.(Madhan Mohan Reddy via gauravn7)"

This reverts commit 890ad905df0ae35c4f3d7ea6c4335fefdb05190b.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/986e7a9b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/986e7a9b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/986e7a9b

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 986e7a9be5302ae557c0fc0e82027de4bad6fd43
Parents: 5efa653
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 23 19:20:56 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 23 19:20:56 2017 +0530

----------------------------------------------------------------------
 .../apache/oozie/ambari/view/OozieDelegate.java |  12 +-
 .../ambari/view/OozieProxyImpersonator.java     | 381 ++++++++++++-------
 .../oozie/ambari/view/assets/AssetResource.java |  93 ++---
 .../WorkflowsManagerResource.java               |  36 +-
 .../ui/app/components/bundle-config.js          |   8 +-
 .../resources/ui/app/components/coord-config.js |   6 +-
 .../ui/app/components/designer-errors.js        |  49 ---
 .../ui/app/components/flow-designer.js          |  69 +++-
 .../src/main/resources/ui/app/styles/app.less   |  13 +-
 .../app/templates/components/bundle-config.hbs  |   4 +-
 .../app/templates/components/coord-config.hbs   |   2 +-
 .../templates/components/designer-errors.hbs    |  17 -
 .../app/templates/components/flow-designer.hbs  |  18 +-
 13 files changed, 378 insertions(+), 330 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
index 6f3c4d2..55c4312 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
@@ -32,8 +32,6 @@ import javax.ws.rs.core.Response;
 
 import org.apache.ambari.view.ViewContext;
 import org.apache.commons.io.IOUtils;
-import org.apache.oozie.ambari.view.exception.ErrorCode;
-import org.apache.oozie.ambari.view.exception.WfmException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -92,16 +90,10 @@ public class OozieDelegate {
 
     LOGGER.info("Resp from oozie status entity=="
       + serviceResponse.getEntity());
-    String oozieResp=null;
     if (serviceResponse.getEntity() instanceof String) {
-      oozieResp= (String) serviceResponse.getEntity();
+      return (String) serviceResponse.getEntity();
     } else {
-      oozieResp= serviceResponse.getEntity().toString();
-    }
-    if (oozieResp != null && oozieResp.trim().startsWith("{")) {
-      return  oozieResp;
-    }else{
-      throw new WfmException(oozieResp,ErrorCode.OOZIE_SUBMIT_ERROR);
+      return "success";
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
index c4e5bbd..6603a9c 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
@@ -49,10 +49,9 @@ import javax.ws.rs.core.UriInfo;
 import org.apache.ambari.view.ViewContext;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.oozie.ambari.view.assets.AssetResource;
-import org.apache.oozie.ambari.view.exception.ErrorCode;
-import org.apache.oozie.ambari.view.exception.WfmException;
-import org.apache.oozie.ambari.view.exception.WfmWebException;
 import org.apache.oozie.ambari.view.workflowmanager.WorkflowManagerService;
 import org.apache.oozie.ambari.view.workflowmanager.WorkflowsManagerResource;
 import org.slf4j.Logger;
@@ -84,7 +83,30 @@ public class OozieProxyImpersonator {
   private final OozieUtils oozieUtils = new OozieUtils();
   private final AssetResource assetResource;
 
+  private enum ErrorCodes {
+    OOZIE_SUBMIT_ERROR("error.oozie.submit", "Oozie Submit error"), OOZIE_IO_ERROR(
+      "error.oozie.io", "Oozie I/O error"), FILE_ACCESS_ACL_ERROR(
+      "error.file.access.control",
+      "Access Error to file due to access control"), FILE_ACCESS_UNKNOWN_ERROR(
+      "error.file.access", "Error accessing file"), WORKFLOW_PATH_EXISTS(
+      "error.workflow.path.exists", "Workflow Path exists"), WORKFLOW_XML_DOES_NOT_EXIST(
+      "error.workflow.xml.not.exists", "Workflow Xml does not exist");
+    private String errorCode;
+    private String description;
+
+    ErrorCodes(String errorCode, String description) {
+      this.errorCode = errorCode;
+      this.description = description;
+    }
+
+    public String getErrorCode() {
+      return errorCode;
+    }
 
+    public String getDescription() {
+      return description;
+    }
+  }
   private static enum WorkflowFormat{
     XML("xml"),
     DRAFT("draft");
@@ -117,23 +139,15 @@ public class OozieProxyImpersonator {
   @GET
   @Path("hdfsCheck")
   public Response hdfsCheck(){
-    try {
-      hdfsFileUtils.hdfsCheck();
-      return Response.ok().build();
-    }catch (Exception e){
-      throw new WfmWebException(e);
-    }
+    hdfsFileUtils.hdfsCheck();
+    return Response.ok().build();
   }
 
   @GET
   @Path("homeDirCheck")
   public Response homeDirCheck(){
-    try{
-      hdfsFileUtils.homeDirCheck();
-      return Response.ok().build();
-    }catch (Exception e){
-      throw new WfmWebException(e);
-    }
+    hdfsFileUtils.homeDirCheck();
+    return Response.ok().build();
   }
 
   @Path("/fileServices")
@@ -175,89 +189,57 @@ public class OozieProxyImpersonator {
                             @QueryParam("projectId") String projectId,
                             @DefaultValue("false") @QueryParam("overwrite") Boolean overwrite,
                             @QueryParam("description") String description,
-                            @QueryParam("jobType") String jobTypeString) {
+                            @QueryParam("jobType") String jobType) {
     LOGGER.info("submit workflow job called");
-    JobType jobType = JobType.valueOf(jobTypeString);
-    if (StringUtils.isEmpty(appPath)) {
-      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
-    }
-    appPath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
-    try {
-      if (!overwrite) {
-        boolean fileExists = hdfsFileUtils.fileExists(appPath);
-        if (fileExists) {
-          throw new WfmWebException(ErrorCode.WORKFLOW_PATH_EXISTS);
-        }
-      }
-      postBody = utils.formatXml(postBody);
-
-      String filePath = workflowFilesService.createFile(appPath, postBody, overwrite);
-      LOGGER.info(String.format("submit workflow job done. filePath=[%s]", filePath));
-
-      if (PROJ_MANAGER_ENABLED) {
-        String name = oozieUtils.deduceWorkflowNameFromXml(postBody);
-        workflowManagerService.saveWorkflow(projectId, appPath, jobType,
-          null, viewContext.getUsername(), name);
-      }
-      String response = oozieDelegate.submitWorkflowJobToOozie(headers,
-        appPath, ui.getQueryParameters(), jobType);
-      return Response.status(Status.OK).entity(response).build();
-    } catch (WfmWebException ex) {
-      throw ex;
-    } catch(WfmException ex){
-      throw new WfmWebException(ex,ex.getErrorCode());
-    } catch(Exception ex) {
-      throw new WfmWebException(ex);
-    }
+    return submitJobInternal(postBody, headers, ui, appPath, overwrite,
+      JobType.valueOf(jobType), projectId, description);
   }
 
   @POST
   @Path("/saveWorkflow")
   @Consumes({MediaType.TEXT_PLAIN + "," + MediaType.TEXT_XML})
   public Response saveWorkflow(String postBody, @Context HttpHeaders headers,
-                               @Context UriInfo ui, @QueryParam("app.path") String appPath,
-                               @QueryParam("jobType") String jobTypeStr,
+                               @Context UriInfo ui, @QueryParam("app.path") String appPath, @QueryParam("jobType") String jobTypeStr,
                                @DefaultValue("false") @QueryParam("overwrite") Boolean overwrite) {
     LOGGER.info("save workflow  called");
     if (StringUtils.isEmpty(appPath)) {
-      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
+      throw new RuntimeException("app path can't be empty.");
     }
     JobType jobType = StringUtils.isEmpty(jobTypeStr) ? JobType.WORKFLOW : JobType.valueOf(jobTypeStr);
     String workflowFilePath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
-    try {
-      if (!overwrite) {
-        boolean fileExists = hdfsFileUtils.fileExists(workflowFilePath);
-        if (fileExists) {
-          throw new WfmWebException(ErrorCode.WORKFLOW_PATH_EXISTS);
-        }
+    if (!overwrite) {
+      boolean fileExists = hdfsFileUtils.fileExists(workflowFilePath);
+      if (fileExists) {
+        return getFileExistsResponse();
       }
+    }
+
+    try {
       if (utils.isXml(postBody)) {
         saveWorkflowXml(jobType, appPath, postBody, overwrite);
       } else {
         saveDraft(jobType, appPath, postBody, overwrite);
       }
       if (PROJ_MANAGER_ENABLED) {
-        workflowManagerService.saveWorkflow(null, workflowFilePath, jobType, null,
+        workflowManagerService.saveWorkflow(null, workflowFilePath,
+          jobType, null,
           viewContext.getUsername(), getWorkflowName(postBody));
       }
-    } catch (WfmWebException ex) {
-      throw ex;
-    } catch (Exception ex) {
-       throw new WfmWebException(ex);
+    } catch (IOException ex) {
+      return getRespCodeForException(ex);
     }
+
     return Response.ok().build();
   }
-
-  private String getWorkflowName(String postBody) {
+  private String getWorkflowName(String postBody){
     if (utils.isXml(postBody)) {
       return oozieUtils.deduceWorkflowNameFromXml(postBody);
-    } else {
+    }else{
       return oozieUtils.deduceWorkflowNameFromJson(postBody);
     }
   }
 
-  private void saveWorkflowXml(JobType jobType, String appPath, String postBody,
-                               Boolean overwrite) throws IOException {
+  private void saveWorkflowXml(JobType jobType, String appPath, String postBody, Boolean overwrite) throws IOException {
     appPath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
     postBody = utils.formatXml(postBody);
     workflowFilesService.createFile(appPath, postBody, overwrite);
@@ -285,47 +267,49 @@ public class OozieProxyImpersonator {
                                @DefaultValue("false") @QueryParam("overwrite") Boolean overwrite) {
     LOGGER.info("publish asset called");
     if (StringUtils.isEmpty(uploadPath)) {
-      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
+      throw new RuntimeException("upload path can't be empty.");
     }
     uploadPath = uploadPath.trim();
-    try {
-      Map<String, String> validateAsset = assetResource.validateAsset(headers, postBody,
-        ui.getQueryParameters());
-      if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
-        WfmWebException wfmEx=new WfmWebException(ErrorCode.INVALID_ASSET_INPUT);
-        wfmEx.setAdditionalDetail(validateAsset.get(MESSAGE_KEY));
-        throw wfmEx;
-      }
-      return saveAsset(postBody, uploadPath, overwrite);
-    } catch (WfmWebException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new WfmWebException(ex);
+    Map<String, String> validateAsset = assetResource.validateAsset(headers, postBody,
+      ui.getQueryParameters());
+    if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
+      return Response.status(Status.BAD_REQUEST).entity(
+        validateAsset.get(MESSAGE_KEY)).build();
     }
+    return saveAsset(postBody, uploadPath, overwrite);
   }
 
-  private Response saveAsset(String postBody, String uploadPath, Boolean overwrite) throws IOException {
+  private Response saveAsset(String postBody, String uploadPath,
+                             Boolean overwrite) {
     uploadPath = workflowFilesService.getAssetFileName(uploadPath);
     if (!overwrite) {
       boolean fileExists = hdfsFileUtils.fileExists(uploadPath);
       if (fileExists) {
-        throw new WfmWebException(ErrorCode.WORKFLOW_PATH_EXISTS);
+        return getFileExistsResponse();
       }
     }
     postBody = utils.formatXml(postBody);
-    String filePath = workflowFilesService.createAssetFile(uploadPath, postBody, overwrite);
-    LOGGER.info(String.format("publish asset job done. filePath=[%s]", filePath));
-    return Response.ok().build();
+    try {
+      String filePath = workflowFilesService.createAssetFile(uploadPath,
+        postBody, overwrite);
+      LOGGER.info(String.format("publish asset job done. filePath=[%s]",
+        filePath));
+      return Response.ok().build();
+    } catch (Exception ex) {
+      LOGGER.error(ex.getMessage(), ex);
+      return getRespCodeForException(ex);
+    }
   }
-
   @GET
   @Path("/readAsset")
-  public Response readAsset(@QueryParam("assetPath") String assetPath) {
+  public Response readAsset(
+          @QueryParam("assetPath") String assetPath) {
     if (StringUtils.isEmpty(assetPath)) {
-      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
+      throw new RuntimeException("assetPath can't be empty.");
     }
     try {
-      final InputStream is = workflowFilesService.readAssset(assetPath);
+      final InputStream is = workflowFilesService
+              .readAssset(assetPath);
       StreamingOutput streamer = new StreamingOutput() {
         @Override
         public void write(OutputStream os) throws IOException,
@@ -336,16 +320,17 @@ public class OozieProxyImpersonator {
         }
       };
       return Response.ok(streamer).status(200).build();
-    } catch (IOException ex) {
-      throw new WfmWebException(ex);
+    } catch (IOException e) {
+      return getRespCodeForException(e);
     }
   }
 
+
   @GET
   @Path("/readWorkflowDraft")
   public Response readDraft(@QueryParam("workflowXmlPath") String workflowPath) {
     if (StringUtils.isEmpty(workflowPath)) {
-      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
+      throw new RuntimeException("workflowXmlPath can't be empty.");
     }
     try {
       final InputStream is = workflowFilesService.readDraft(workflowPath);
@@ -359,37 +344,113 @@ public class OozieProxyImpersonator {
         }
       };
       return Response.ok(streamer).status(200).build();
-    } catch (IOException ex) {
-      throw new WfmWebException(ex);
+    } catch (IOException e) {
+      return getRespCodeForException(e);
     }
   }
 
   @POST
   @Path("/discardWorkflowDraft")
   public Response discardDraft(
-    @QueryParam("workflowXmlPath") String workflowPath) {
+    @QueryParam("workflowXmlPath") String workflowPath)
+    throws IOException {
+    workflowFilesService.discardDraft(workflowPath);
+    return Response.ok().build();
+  }
+
+  private Response submitJobInternal(String postBody, HttpHeaders headers,
+                                     UriInfo ui, String appPath, Boolean overwrite, JobType jobType,
+                                     String projectId, String description) {
+    if (StringUtils.isEmpty(appPath)) {
+      throw new RuntimeException("app path can't be empty.");
+    }
+    appPath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
+    if (!overwrite) {
+      boolean fileExists = hdfsFileUtils.fileExists(appPath);
+      if (fileExists) {
+        return getFileExistsResponse();
+      }
+    }
+    postBody = utils.formatXml(postBody);
     try {
-      workflowFilesService.discardDraft(workflowPath);
-      return Response.ok().build();
-    } catch (IOException ex) {
-      throw new WfmWebException(ex);
+      String filePath = workflowFilesService.createFile(appPath, postBody,
+        overwrite);
+      LOGGER.info(String.format(
+        "submit workflow job done. filePath=[%s]", filePath));
+    } catch (Exception ex) {
+      LOGGER.error(ex.getMessage(), ex);
+      return getRespCodeForException(ex);
+
+    }
+    if (PROJ_MANAGER_ENABLED) {
+      String name = oozieUtils.deduceWorkflowNameFromXml(postBody);
+      workflowManagerService.saveWorkflow(projectId, appPath, jobType,
+        "todo description", viewContext.getUsername(), name);
+    }
+
+    String response = oozieDelegate.submitWorkflowJobToOozie(headers,
+      appPath, ui.getQueryParameters(), jobType);
+    if (response != null && response.trim().startsWith("{")) {
+      // dealing with oozie giving error but with 200 response.
+      return Response.status(Response.Status.OK).entity(response).build();
+    } else {
+      HashMap<String, String> resp = new HashMap<String, String>();
+      resp.put("status", ErrorCodes.OOZIE_SUBMIT_ERROR.getErrorCode());
+      resp.put("message", response);
+      return Response.status(Response.Status.BAD_REQUEST).entity(resp)
+        .build();
     }
+
+  }
+
+  private Response getRespCodeForException(Exception ex) {
+    if (ex instanceof AccessControlException) {
+      HashMap<String, String> errorDetails = getErrorDetails(
+        ErrorCodes.FILE_ACCESS_ACL_ERROR.getErrorCode(),
+        ErrorCodes.FILE_ACCESS_ACL_ERROR.getDescription(), ex);
+      return Response.status(Response.Status.BAD_REQUEST)
+        .entity(errorDetails).build();
+    } else if (ex instanceof IOException) {
+      HashMap<String, String> errorDetails = getErrorDetails(
+        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getErrorCode(),
+        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getDescription(), ex);
+      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
+        .entity(errorDetails).build();
+    } else {
+      HashMap<String, String> errorDetails = getErrorDetails(
+        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getErrorCode(),
+        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getDescription(), ex);
+      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
+        .entity(errorDetails).build();
+    }
+
+  }
+
+  private Response getFileExistsResponse() {
+    HashMap<String, String> resp = new HashMap<String, String>();
+    resp.put("status", ErrorCodes.WORKFLOW_PATH_EXISTS.getErrorCode());
+    resp.put("message", ErrorCodes.WORKFLOW_PATH_EXISTS.getDescription());
+    return Response.status(Response.Status.BAD_REQUEST).entity(resp)
+      .build();
   }
 
   @GET
   @Path("/readWorkflow")
   public Response readWorkflow(
     @QueryParam("workflowPath") String workflowPath, @QueryParam("jobType") String jobTypeStr) {
-    try {
-      String workflowFileName = workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
-      if (!hdfsFileUtils.fileExists(workflowFileName)) {
-        throw new WfmWebException(ErrorCode.WORKFLOW_XML_DOES_NOT_EXIST);
-      }
-      WorkflowFileInfo workflowDetails = workflowFilesService
-        .getWorkflowDetails(workflowPath, JobType.valueOf(jobTypeStr));
-      if (workflowPath.endsWith(Constants.WF_DRAFT_EXTENSION) || workflowDetails.getIsDraftCurrent()) {
-        String filePath = workflowFilesService.getWorkflowDraftFileName(workflowPath, JobType.valueOf(jobTypeStr));
+    String workflowFileName=workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
+    if (!hdfsFileUtils.fileExists(workflowFileName)){
+      HashMap<String,String> response=new HashMap<>();
+      response.put("status", ErrorCodes.WORKFLOW_XML_DOES_NOT_EXIST.getErrorCode());
+      response.put("message", ErrorCodes.WORKFLOW_XML_DOES_NOT_EXIST.getDescription());
+      return Response.status(Status.BAD_REQUEST).entity(response).build();
+    }
 
+    WorkflowFileInfo workflowDetails = workflowFilesService
+      .getWorkflowDetails(workflowPath, JobType.valueOf(jobTypeStr));
+    if (workflowPath.endsWith(Constants.WF_DRAFT_EXTENSION) || workflowDetails.getIsDraftCurrent()) {
+      String filePath = workflowFilesService.getWorkflowDraftFileName(workflowPath, JobType.valueOf(jobTypeStr));
+      try {
         InputStream inputStream = workflowFilesService.readWorkflowXml(filePath);
         String stringResponse = IOUtils.toString(inputStream);
         if (!workflowFilesService.isDraftFormatCurrent(stringResponse)) {
@@ -398,35 +459,36 @@ public class OozieProxyImpersonator {
         } else {
           return Response.ok(stringResponse).header(RESPONSE_TYPE, WorkflowFormat.DRAFT.getValue()).build();
         }
-      } else {
-        String filePath = workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
-        return getWorkflowResponse(filePath, WorkflowFormat.XML.getValue(), false);
+      } catch (IOException e) {
+        return getRespCodeForException(e);
       }
-    } catch (WfmWebException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new WfmWebException(ex);
+    } else {
+      String filePath = workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
+      return getWorkflowResponse(filePath, WorkflowFormat.XML.getValue(), false);
     }
   }
 
-  private Response getWorkflowResponse(String filePath, String responseType,
-                                       boolean olderFormatDraftIngored) throws IOException {
-    final InputStream is = workflowFilesService.readWorkflowXml(filePath);
-    StreamingOutput streamer = new StreamingOutput() {
-      @Override
-      public void write(OutputStream os) throws IOException,
-        WebApplicationException {
-        IOUtils.copy(is, os);
-        is.close();
-        os.close();
+  private Response getWorkflowResponse(String filePath, String responseType, boolean olderFormatDraftIngored) {
+    try {
+      final InputStream is = workflowFilesService
+        .readWorkflowXml(filePath);
+      StreamingOutput streamer = new StreamingOutput() {
+        @Override
+        public void write(OutputStream os) throws IOException,
+          WebApplicationException {
+          IOUtils.copy(is, os);
+          is.close();
+          os.close();
+        }
+      };
+      Response.ResponseBuilder responseBuilder = Response.ok(streamer).header(RESPONSE_TYPE, responseType);
+      if(olderFormatDraftIngored){
+        responseBuilder.header(OLDER_FORMAT_DRAFT_INGORED,Boolean.TRUE.toString());
       }
-    };
-    Response.ResponseBuilder responseBuilder = Response.ok(streamer).header(RESPONSE_TYPE, responseType);
-    if (olderFormatDraftIngored) {
-      responseBuilder.header(OLDER_FORMAT_DRAFT_INGORED, Boolean.TRUE.toString());
+      return  responseBuilder.build();
+    } catch (IOException e) {
+      return getRespCodeForException(e);
     }
-    return responseBuilder.build();
-
   }
 
   @GET
@@ -434,13 +496,12 @@ public class OozieProxyImpersonator {
   public Response readWorkflowXml(
     @QueryParam("workflowXmlPath") String workflowPath,@QueryParam("jobType") String jobTypeStr) {
     if (StringUtils.isEmpty(workflowPath)) {
-      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
+      throw new RuntimeException("workflowXmlPath can't be empty.");
     }
+
     try {
-      if (!hdfsFileUtils.fileExists(workflowPath)) {
-        throw new WfmWebException(ErrorCode.WORKFLOW_XML_DOES_NOT_EXIST);
-      }
-      final InputStream is = workflowFilesService.readWorkflowXml(workflowPath);
+      final InputStream is = workflowFilesService
+        .readWorkflowXml(workflowPath);
       StreamingOutput streamer = new StreamingOutput() {
         @Override
         public void write(OutputStream os) throws IOException,
@@ -451,13 +512,24 @@ public class OozieProxyImpersonator {
         }
       };
       return Response.ok(streamer).status(200).build();
-    } catch (WfmWebException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new WfmWebException(ex);
+    } catch (IOException e) {
+      return getRespCodeForException(e);
     }
   }
 
+  private HashMap<String, String> getErrorDetails(String status,
+                                                  String message, Exception ex) {
+    HashMap<String, String> resp = new HashMap<String, String>();
+    resp.put("status", status);
+    if (message != null) {
+      resp.put("message", message);
+    }
+    if (ex != null) {
+      resp.put("stackTrace", ExceptionUtils.getFullStackTrace(ex));
+    }
+    return resp;
+  }
+
   @GET
   @Path("/{path: .*}")
   public Response handleGet(@Context HttpHeaders headers, @Context UriInfo ui) {
@@ -466,7 +538,8 @@ public class OozieProxyImpersonator {
         .getPath(), ui.getQueryParameters(), HttpMethod.GET, null);
     } catch (Exception ex) {
       LOGGER.error("Error in GET proxy", ex);
-      throw new WfmWebException(ex);
+      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
+        .entity(getErrorDetailsForException("Oozie", ex)).build();
     }
   }
 
@@ -475,11 +548,13 @@ public class OozieProxyImpersonator {
   public Response handlePost(String xml, @Context HttpHeaders headers,
                              @Context UriInfo ui) {
     try {
+
       return oozieDelegate.consumeService(headers, ui.getAbsolutePath()
         .getPath(), ui.getQueryParameters(), HttpMethod.POST, xml);
     } catch (Exception ex) {
       LOGGER.error("Error in POST proxy", ex);
-      throw new WfmWebException(ex);
+      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
+        .entity(getErrorDetailsForException("Oozie", ex)).build();
     }
   }
 
@@ -492,7 +567,8 @@ public class OozieProxyImpersonator {
         .getPath(), ui.getQueryParameters(), HttpMethod.POST, null);
     } catch (Exception ex) {
       LOGGER.error("Error in DELETE proxy", ex);
-      throw new WfmWebException(ex);
+      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
+        .entity(getErrorDetailsForException("Oozie", ex)).build();
     }
   }
 
@@ -505,7 +581,22 @@ public class OozieProxyImpersonator {
         .getPath(), ui.getQueryParameters(), HttpMethod.PUT, body);
     } catch (Exception ex) {
       LOGGER.error("Error in PUT proxy", ex);
-      throw new WfmWebException(ex);
+      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
+        .entity(getErrorDetailsForException("Oozie", ex)).build();
+    }
+  }
+
+  private Map<String, String> getErrorDetailsForException(String component,
+                                                          Exception ex) {
+    String errorCode = component + "exception";
+    String errorMessage = component + " Exception";
+    if (ex instanceof RuntimeException) {
+      Throwable cause = ex.getCause();
+      if (cause instanceof IOException) {
+        errorCode = component + "io.exception";
+        errorMessage = component + "IO Exception";
+      }
     }
+    return getErrorDetails(errorCode, errorMessage, ex);
   }
 }
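
Note on the pattern being restored above: instead of throwing WfmWebException wrappers, each resource method catches exceptions itself and converts them into an HTTP response carrying a small status/message map (see getRespCodeForException and getErrorDetails). Below is a minimal, self-contained sketch of that pattern, for reference only; the class name, the error-code strings, and the assumption that AccessControlException refers to Hadoop's org.apache.hadoop.security.AccessControlException are illustrative placeholders, not taken from the patch.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.Response;

import org.apache.hadoop.security.AccessControlException;

@Path("/errorMappingExample")
public class ErrorMappingExampleResource {

  @GET
  public Response read() {
    try {
      return Response.ok(loadFile()).build();
    } catch (Exception ex) {
      return responseFor(ex);
    }
  }

  // ACL violations become 400 with a structured payload; other failures become 500.
  private Response responseFor(Exception ex) {
    Map<String, String> details = new HashMap<String, String>();
    details.put("message", String.valueOf(ex.getMessage()));
    if (ex instanceof AccessControlException) {
      details.put("status", "file.access.acl.error");       // illustrative error code
      return Response.status(Response.Status.BAD_REQUEST).entity(details).build();
    }
    details.put("status", "file.access.unknown.error");     // illustrative error code
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(details).build();
  }

  private String loadFile() throws IOException {
    // stand-in for a call into HDFS via workflowFilesService/hdfsFileUtils
    return "<workflow-app/>";
  }
}

Returning a payload with status, message, and optionally stackTrace keeps the UI able to render error details from responseText without relying on a shared exception mapper, which is what the flow-designer changes later in this patch depend on.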

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
index 3355c85..ef3b508 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
@@ -25,15 +25,13 @@ import org.apache.oozie.ambari.view.*;
 import org.apache.oozie.ambari.view.assets.model.ActionAsset;
 import org.apache.oozie.ambari.view.assets.model.ActionAssetDefinition;
 import org.apache.oozie.ambari.view.assets.model.AssetDefintion;
-import org.apache.oozie.ambari.view.exception.ErrorCode;
-import org.apache.oozie.ambari.view.exception.WfmException;
-import org.apache.oozie.ambari.view.exception.WfmWebException;
 import org.apache.oozie.ambari.view.model.APIResult;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.*;
 import javax.ws.rs.core.*;
+import javax.ws.rs.core.Response.Status;
 import java.io.IOException;
 import java.util.*;
 
@@ -67,7 +65,7 @@ public class AssetResource {
       result.setData(assets);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new WfmWebException(e);
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -82,7 +80,7 @@ public class AssetResource {
       result.setData(assets);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new WfmWebException(e);
+      throw new ServiceFormattedException(e);
     }
   }
   @POST
@@ -90,20 +88,19 @@ public class AssetResource {
                             @QueryParam("id") String id, @Context UriInfo ui, String body) {
     try {
       Gson gson = new Gson();
-      AssetDefintion assetDefinition = gson.fromJson(body, AssetDefintion.class);
+      AssetDefintion assetDefinition = gson.fromJson(body,
+        AssetDefintion.class);
       Map<String, String> validateAsset = validateAsset(headers,
         assetDefinition.getDefinition(), ui.getQueryParameters());
       if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
-        throw new WfmWebException(ErrorCode.ASSET_INVALID_FROM_OOZIE);
+        return Response.status(Status.BAD_REQUEST).build();
       }
       assetService.saveAsset(id, viewContext.getUsername(), assetDefinition);
       APIResult result = new APIResult();
       result.setStatus(APIResult.Status.SUCCESS);
       return Response.ok(result).build();
-    } catch (WfmWebException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new WfmWebException(ex);
+    } catch (Exception e) {
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -116,50 +113,43 @@ public class AssetResource {
   public Map<String, String> validateAsset(HttpHeaders headers,
                                            String postBody, MultivaluedMap<String, String> queryParams) {
     String workflowXml = oozieUtils.generateWorkflowXml(postBody);
-    Map<String, String> result = new HashMap<>();
-    String tempWfPath = "/tmp" + "/tmpooziewfs/tempwf_" + Math.round(Math.random() * 100000) + ".xml";
     try {
+      Map<String, String> result = new HashMap<>();
+      String tempWfPath = "/tmp" + "/tmpooziewfs/tempwf_" + Math.round(Math.random()*100000) + ".xml";
       hdfsFileUtils.writeToFile(tempWfPath, workflowXml, true);
-    } catch (IOException e) {
-      throw new WfmWebException(e, ErrorCode.FILE_ACCESS_UNKNOWN_ERROR);
-    }
-    queryParams.put("oozieparam.action", getAsList("dryrun"));
-    queryParams.put("oozieconfig.rerunOnFailure", getAsList("false"));
-    queryParams.put("oozieconfig.useSystemLibPath", getAsList("true"));
-    queryParams.put("resourceManager", getAsList("useDefault"));
-    String dryRunResp = oozieDelegate.submitWorkflowJobToOozie(headers,
-      tempWfPath, queryParams, JobType.WORKFLOW);
-    LOGGER.info(String.format("resp from validating asset=[%s]", dryRunResp));
-    try {
+      queryParams.put("oozieparam.action", getAsList("dryrun"));
+      queryParams.put("oozieconfig.rerunOnFailure", getAsList("false"));
+      queryParams.put("oozieconfig.useSystemLibPath", getAsList("true"));
+      queryParams.put("resourceManager", getAsList("useDefault"));
+      String dryRunResp = oozieDelegate.submitWorkflowJobToOozie(headers,
+        tempWfPath, queryParams, JobType.WORKFLOW);
+      LOGGER.info(String.format("resp from validating asset=[%s]",
+        dryRunResp));
       hdfsFileUtils.deleteFile(tempWfPath);
-    } catch (IOException e) {
-      throw new WfmWebException(e, ErrorCode.FILE_ACCESS_UNKNOWN_ERROR);
-    }
-    if (dryRunResp != null && dryRunResp.trim().startsWith("{")) {
-      JsonElement jsonElement = new JsonParser().parse(dryRunResp);
-      JsonElement idElem = jsonElement.getAsJsonObject().get("id");
-      if (idElem != null) {
-        result.put(STATUS_KEY, STATUS_OK);
+      if (dryRunResp != null && dryRunResp.trim().startsWith("{")) {
+        JsonElement jsonElement = new JsonParser().parse(dryRunResp);
+        JsonElement idElem = jsonElement.getAsJsonObject().get("id");
+        if (idElem != null) {
+          result.put(STATUS_KEY, STATUS_OK);
+        } else {
+          result.put(STATUS_KEY, STATUS_FAILED);
+          result.put(MESSAGE_KEY, dryRunResp);
+        }
       } else {
         result.put(STATUS_KEY, STATUS_FAILED);
         result.put(MESSAGE_KEY, dryRunResp);
       }
-    } else {
-      result.put(STATUS_KEY, STATUS_FAILED);
-      result.put(MESSAGE_KEY, dryRunResp);
+      return result;
+    } catch (IOException e) {
+      throw new RuntimeException(e);
     }
-    return result;
   }
 
   @GET
   @Path("/assetNameAvailable")
   public Response assetNameAvailable(@QueryParam("name") String name){
-    try {
-      boolean available = assetService.isAssetNameAvailable(name);
-      return Response.ok(available).build();
-    }catch (Exception e){
-      throw new WfmWebException(e);
-    }
+    boolean available=assetService.isAssetNameAvailable(name);
+    return Response.ok(available).build();
   }
 
   @GET
@@ -172,7 +162,7 @@ public class AssetResource {
       result.setData(assetDefinition);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new WfmWebException(e);
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -180,13 +170,14 @@ public class AssetResource {
   @Path("/definition/id}")
   public Response getAssetDefinition(@PathParam("defnitionId") String id) {
     try {
-      ActionAssetDefinition assetDefinition = assetService.getAssetDefinition(id);
+      ActionAssetDefinition assetDefinition = assetService
+        .getAssetDefinition(id);
       APIResult result = new APIResult();
       result.setStatus(APIResult.Status.SUCCESS);
       result.setData(assetDefinition);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new WfmWebException(e);
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -196,19 +187,19 @@ public class AssetResource {
     try {
       ActionAsset asset = assetService.getAsset(id);
       if (asset == null) {
-        throw new WfmWebException(ErrorCode.ASSET_NOT_EXIST);
+        throw new RuntimeException("Asset doesnt exist");
       }
       if (!viewContext.getUsername().equals(asset.getOwner())){
-        throw new WfmWebException(ErrorCode.PERMISSION_ERROR);
+        throw new RuntimeException(
+          "Dont have permission to delete this asset");
       }
       assetService.deleteAsset(id);
       APIResult result = new APIResult();
       result.setStatus(APIResult.Status.SUCCESS);
       return Response.ok(result).build();
-    } catch (WfmWebException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new WfmWebException(ex);
+    } catch (Exception e) {
+      throw new ServiceFormattedException(e);
     }
   }
+
 }
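
The validateAsset change above keeps the Oozie dry-run check inline: the generated workflow XML is written to a temporary HDFS path, submitted with oozieparam.action=dryrun, and the reply is treated as success only when it is JSON containing an id. A stripped-down sketch of just that response classification follows, assuming the same Gson JsonParser the class already uses; the class name and the status/message keys are placeholders, not the patch's own constants.

import java.util.HashMap;
import java.util.Map;

import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

public class DryRunResponseCheck {

  // Success only when the dry-run reply is JSON and carries an "id" member;
  // anything else (Oozie error text, null) is reported back as the failure message.
  public static Map<String, String> classify(String dryRunResp) {
    Map<String, String> result = new HashMap<String, String>();
    if (dryRunResp != null && dryRunResp.trim().startsWith("{")) {
      JsonElement root = new JsonParser().parse(dryRunResp);
      if (root.getAsJsonObject().get("id") != null) {
        result.put("status", "success");
        return result;
      }
    }
    result.put("status", "failed");
    result.put("message", dryRunResp);
    return result;
  }

  public static void main(String[] args) {
    System.out.println(classify("{\"id\":\"0000001-170222-oozie-W\"}")); // success
    System.out.println(classify("E0701: XML schema error"));            // failed, message echoed back
  }
}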

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
index e1a5808..a0aa234 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
@@ -26,10 +26,8 @@ import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
 
 import org.apache.ambari.view.ViewContext;
-import org.apache.oozie.ambari.view.exception.WfmWebException;
 
 public class WorkflowsManagerResource {
 	private final WorkflowManagerService workflowManagerService;
@@ -40,27 +38,19 @@ public class WorkflowsManagerResource {
 		this.workflowManagerService=new WorkflowManagerService(viewContext);
 	}
 
-  @GET
-  public Response getWorkflows() {
-    try {
-      HashMap<String, Object> result = new HashMap<>();
-      result.put("wfprojects", workflowManagerService.getAllWorkflows(viewContext.getUsername()));
-      return Response.ok(result).build();
-    } catch (Exception ex) {
-      throw new WfmWebException(ex);
-    }
-  }
-
-
-  @DELETE
+	@GET
+	public Map<String,Object> getWorkflows(){
+	    HashMap<String,Object> result=new HashMap<>();
+	    result.put("wfprojects", workflowManagerService.getAllWorkflows(viewContext.getUsername()));
+	    return result;
+	}
+	
+	
+	@DELETE
 	@Path("/{projectId}")
-	public Response deleteWorkflow(@PathParam("projectId") String id,
-                                 @DefaultValue("false") @QueryParam("deleteDefinition") Boolean deleteDefinition){
-	  try{
-      workflowManagerService.deleteWorkflow(id,deleteDefinition);
-      return Response.ok().build();
-    }catch (Exception ex) {
-      throw new WfmWebException(ex);
-    }
+	public void deleteWorkflow( @PathParam("projectId") String id,
+            @DefaultValue("false") @QueryParam("deleteDefinition") Boolean deleteDefinition){
+	    workflowManagerService.deleteWorkflow(id,deleteDefinition);
 	}
+	
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
index e94d51a..3ccbc07 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
@@ -156,12 +156,10 @@ export default Ember.Component.extend(Ember.Evented, Validations, {
       }
       this.set('bundleFilePath', filePath);
       this.set("isImporting", false);
-    }.bind(this)).catch(function(data){
-      console.error(data);
-      this.set("errorMsg", "There is some problem while importing.");
+    }.bind(this)).catch(function(e){
       this.set("isImporting", false);
       this.set("isImportingSuccess", false);
-      this.set("data", data);
+      throw new Error(e);
     }.bind(this));
   },
   getBundleFromJSON(draftBundle){
@@ -365,7 +363,7 @@ export default Ember.Component.extend(Ember.Evented, Validations, {
       }.bind(this)).catch(function(e){
         this.$('#loading').hide();
         this.get("errors").pushObject({'message' : 'Could not process coordinator from ' + e.path});
-        throw new Error(e);
+        throw new Error(e.trace);
       }.bind(this));
     },
     preview(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
index 4a57e37..bbd619d 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
@@ -274,12 +274,10 @@ export default Ember.Component.extend(Validations, Ember.Evented, {
       }
       this.set('coordinatorFilePath', filePath);
       this.set("isImporting", false);
-    }.bind(this)).catch(function(data){
-      console.error(data);
-      this.set("errorMsg", "There is some problem while importing.");
+    }.bind(this)).catch(function(e){
       this.set("isImporting", false);
       this.set("isImportingSuccess", false);
-      this.set("data", data);
+      throw new Error(e);
     }.bind(this));
   },
   getCoordinatorFromJSON(draftCoordinator){

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
index fdb4f5e..7a7c38d 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
@@ -18,53 +18,4 @@
 import Ember from 'ember';
 
 export default Ember.Component.extend({
-  showingStackTrace: false,
-  hasErrorMsg : Ember.computed('errorMsg', function() {
-    return !Ember.isBlank(this.get("errorMsg"));
-  }),
-  errorMsgDetails : Ember.computed('data.responseText', function() {
-    var jsonResponse = this.getparsedResponse();
-    if (jsonResponse.message) {
-      if (jsonResponse.message.indexOf('Permission denied') >= 0) {
-        return "Permission Denied";
-      }
-      return jsonResponse.message;
-    }
-    return "";
-  }),
-  stackTrace : Ember.computed('data.responseText', function() {
-      var jsonResponse = this.getparsedResponse();
-      var stackTraceMsg = jsonResponse.stackTrace;
-      if(!stackTraceMsg){
-        return "";
-      }
-      if (stackTraceMsg instanceof Array) {
-        return stackTraceMsg.join("").replace(/\tat /g, '&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
-      } else {
-        return stackTraceMsg.replace(/\tat /g, '<br/>&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
-      }
-  }),
-  isStackTraceAvailable : Ember.computed('stackTrace', function(){
-    return this.get('stackTrace') && this.get('stackTrace').length ? true : false;
-  }),
-  getparsedResponse() {
-    var response = this.get('data.responseText');
-    if (response) {
-      try {
-        return JSON.parse(response);
-      } catch(err){
-        return "";
-      }
-    }
-    return "";
-  },
-
-  actions: {
-    showStackTrace(){
-      this.set("showingStackTrace", !this.get("showingStackTrace"));
-    },
-    closeStackTrace(){
-      this.set("showingStackTrace", false);
-    }
-  }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index de72c6d..f97add8 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -94,13 +94,13 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   isWorkflowImporting: false,
   isAssetPublishing: false,
   errorMsg: "",
-  data : {
-    "responseText": ""
-  },
   shouldPersist : false,
   useCytoscape: Constants.useCytoscape,
   cyOverflow: {},
   clipboard : Ember.computed.alias('clipboardService.clipboard'),
+  isStackTraceVisible: false,
+  isStackTraceAvailable: false,
+  stackTrace:"",
   showingStreamImport:false,
   fileInfo:Ember.Object.create(),
   isDraft: false,
@@ -310,6 +310,24 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   doValidation(){
     this.validate();
   },
+  getStackTrace(data){
+    if(data){
+     try{
+      var stackTraceMsg = JSON.parse(data).stackTrace;
+      if(!stackTraceMsg){
+        return "";
+      }
+     if(stackTraceMsg instanceof Array){
+       return stackTraceMsg.join("").replace(/\tat /g, '&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
+     } else {
+       return stackTraceMsg.replace(/\tat /g, '<br/>&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
+     }
+     } catch(err){
+       return "";
+     }
+    }
+    return "";
+  },
   importWorkflow(filePath){
     var self = this;
     this.set("isWorkflowImporting", true);
@@ -325,8 +343,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       this.set("workflowFilePath", filePath);
     }.bind(this)).catch(function(data){
       console.error(data);
-      self.set("errorMsg", "There is some problem while importing.");
-      self.set("data", data);
+      self.set("errorMsg", "There is some problem while importing.Please try again.");
+      self.showingErrorMsgInDesigner(data);
       self.set("isWorkflowImporting", false);
     });
   },
@@ -473,8 +491,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     exportActionNodeXmlDefered.promise.then(function(data){
       self.set("isAssetPublishing", false);
     }.bind(this)).catch(function(data){
-      self.set("errorMsg", "There is some problem while publishing asset.");
-      self.set("data", data);
+      self.set("errorMsg", "There is some problem while publishing asset. Please try again.");
+      self.showingErrorMsgInDesigner(data);
       self.set("isAssetPublishing", false);
     });
 
@@ -721,6 +739,15 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       this.set("showingWorkflowConfigProps",true);
     }
   },
+  showingErrorMsgInDesigner(data){
+      var self = this, stackTraceMsg = self.getStackTrace(data.responseText);
+      if(stackTraceMsg.length){
+        self.set("stackTrace", stackTraceMsg);
+        self.set("isStackTraceAvailable", true);
+      } else {
+        self.set("isStackTraceAvailable", false);
+      }
+  },
   isDraftExists(path){
     var deferred = Ember.RSVP.defer(), url, self = this;
     if(!path){
@@ -806,6 +833,12 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       });
       reader.readAsText(file);
     },
+    showStackTrace(){
+      this.set("isStackTraceVisible", true);
+    },
+    hideStackTrace(){
+      this.set("isStackTraceVisible", false);
+    },
     showWorkflowSla (value) {
       this.set('showWorkflowSla', value);
     },
@@ -996,8 +1029,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         actionSettingsXmlDefered.promise.then(function(data){
           this.importActionSettingsFromString(data);
         }.bind(this)).catch(function(data){
-          self.set("errorMsg", "There is some problem while importing asset.");
-          self.set("data", data);
+          console.error(data);
+          self.set("errorMsg", "There is some problem while importing asset.Please try again.");
+          self.showingErrorMsgInDesigner(data);
         });
       }
     },
@@ -1013,8 +1047,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         actionSettingsXmlDefered.promise.then(function(data){
           this.importActionNodeFromString(data);
         }.bind(this)).catch(function(data){
-          self.set("errorMsg", "There is some problem while importing asset.");
-          self.set("data", data);
+          console.error(data);
+          self.set("errorMsg", "There is some problem while importing asset. Please try again.");
+          self.showingErrorMsgInDesigner(data);
         });
       }
     },
@@ -1149,9 +1184,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       saveAssetConfigDefered.promise.then(function(data){
         self.set("isAssetPublishing", false);
       }.bind(this)).catch(function(data){
-        self.set("errorMsg", "There is some problem while saving asset.");
-        self.set("data", data);
         self.set("isAssetPublishing", false);
+        self.set("errorMsg", "There is some problem while saving asset. Please try again.");
+        self.showingErrorMsgInDesigner(data);
       });
     },
     showAssetList(value) {
@@ -1169,9 +1204,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         self.importActionSettingsFromString(importedAsset.definition);
         self.set("isAssetImporting", false);
       }.bind(this)).catch(function(data){
-        self.set("errorMsg", "There is some problem while importing asset.");
-        self.set("data", data);
         self.set("isAssetImporting", false);
+        self.set("errorMsg", "There is some problem while importing asset. Please try again.");
+        self.showingErrorMsgInDesigner(data);
       });
     },
     showAssetNodeList(value) {
@@ -1189,9 +1224,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         self.importActionNodeFromString(importedAsset.definition);
         self.set("isAssetImporting", false);
       }.bind(this)).catch(function(data){
-        self.set("errorMsg", "There is some problem while importing asset.");
-        self.set("data", data);
         self.set("isAssetImporting", false);
+        self.set("errorMsg", "There is some problem while importing asset. Please try again.");
+        self.showingErrorMsgInDesigner(data);
       });
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
index a424049..e98d182 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
@@ -605,10 +605,9 @@ input:invalid {
 #configureJob .modal-dialog,
 #asset-delete-confirm-dialog .modal-dialog,
 #projectsList .modal-dialog,
-#previewModal .modal-dialog,
-#stack_trace_dialog .modal-dialog {
+#previewModal .modal-dialog {
     width: @modalDialogWidth;
-    height: 100vh;
+height: 100vh;
 }
 
 #collapseOne{
@@ -1548,11 +1547,15 @@ input:invalid {
   padding-left: 0px;
   padding-right: 0px;
 }
-
+#stackTrace{
+  white-space: pre-wrap;
+  max-width: 100%;
+  max-height: 400px;
+  overflow: scroll;
+}
 .jobIdClass {
   width: 50px;
 }
-
 .width50 {
     white-space: nowrap;
     width: 150px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
index 2d374a5..ca58431 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
@@ -72,7 +72,7 @@
     <div id='loading'>
       {{spin-spinner lines=13 length=20 width=10}}
     </div>
-    {{designer-errors errors=errors validationErrors=validationErrors errorMsg=errorMsg data=data}}
+    {{designer-errors errors=errors}}
     <form class="form-horizontal">
       <div class="col-sm-12 paddingtop10">
         <div class="col-sm-8 centralize-panel">
@@ -100,7 +100,7 @@
                   <li class="list-group-item">No Coordinators Configured.</li>
                   {{/each}}
                 </ul>
-                {{#field-error model=this field='bundle.coordinators' showErrorMessage=showErrorMessage}}{{/field-error}}
+                {{#field-error model=this field='bundle.coordinators' showErrorMessage=true}}{{/field-error}}
 
               {{#if coordinatorCreateMode}}
               {{#bundle-coord-config coordinator=currentCoordinator openTab="openTab" openFileBrowser="openFileBrowser" add="addCoordinator" cancel="cancelCoordinatorOperation" createMode=coordinatorCreateMode}}{{/bundle-coord-config}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
index 7db5ce2..7b607ca 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
@@ -93,7 +93,7 @@
   {{spin-spinner lines=13 length=20 width=10}}
 </div>
 <div class="container-fluid">
-  {{designer-errors errors=errors validationErrors=validationErrors errorMsg=errorMsg data=data}}
+  {{designer-errors errors=errors}}
   <form class="form-horizontal">
     <div class="col-sm-12 paddingtop10">
       <div class="col-sm-8 centralize-panel">

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
index 00cb8a6..8438255 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
@@ -34,21 +34,4 @@
       {{/if}}
     </div>
   {{/if}}
-
-  {{#if hasErrorMsg}}
-    <div id="loader">
-        <div id="alert"class="alert alert-danger alert-dismissible workflow-error" role="alert">
-            {{errorMsg}}
-            <div id="errorMsgDetails">
-              {{errorMsgDetails}}
-              {{#if isStackTraceAvailable}}
-                <a href="#" class="action-link" {{action "showStackTrace"}}>Details</a>
-              {{/if}}
-            </div>
-        </div>
-    </div>
-  {{/if}}
 </div>
-{{#if showingStackTrace}}
-  {{#stack-trace-dialog title="Stack Trace" stackTrace=stackTrace closeStackTrace="closeStackTrace"}}{{/stack-trace-dialog}}
-{{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/986e7a9b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index 4ff9d87..1a73421 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -138,7 +138,7 @@
   {{/if}}
   <div  id="content" class="panel panel-default designer-main-panel col-xs-20">
     <div class="designer-panel designer-canvas">
-      {{designer-errors errors=errors validationErrors=validationErrors errorMsg=errorMsg data=data}}
+      {{designer-errors errors=errors validationErrors=validationErrors}}
       {{#if undoAvailable}}
         <div id="alert"class="alert alert-warning workflow-error" role="alert">
           {{#if (eq undoType 'nodeDeleted')}}
@@ -157,6 +157,22 @@
           </div>
       </div>
       {{/if}}
+      {{#if (not (eq errorMsg ""))}}
+        <div id="loader">
+            <div id="alert"class="alert alert-danger alert-dismissible workflow-error" role="alert">
+                {{errorMsg}}
+                {{#if isStackTraceAvailable}}
+                  {{#if isStackTraceVisible}}
+                    <a href="#" class="action-link" {{action "hideStackTrace"}}>Hide Log</a>
+                    <div id="stackTrace">{{{stackTrace}}}</div>
+                  {{/if}}
+                  {{#unless isStackTraceVisible}}
+                    <a href="#" class="action-link" {{action "showStackTrace"}}>Show Log</a>
+                  {{/unless}}
+                {{/if}}
+            </div>
+        </div>
+      {{/if}}
       {{#if isAssetPublishing}}
         <div id="loader">
             <div id="alert"class="alert alert-info alert-dismissible workflow-error" role="alert">


[05/50] ambari git commit: AMBARI-20036. Side Nav: implement the Services Actions - 2.(xiwang)

Posted by nc...@apache.org.
AMBARI-20036. Side Nav: implement the Services Actions - 2.(xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/65d90175
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/65d90175
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/65d90175

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 65d901756c2b3bfb621bfd028094ffdbdcfca2fa
Parents: d7e9ef2
Author: Xi Wang <xi...@apache.org>
Authored: Tue Feb 21 16:34:21 2017 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Feb 21 16:34:21 2017 -0800

----------------------------------------------------------------------
 ambari-web/vendor/scripts/theme/bootstrap-ambari.js | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/65d90175/ambari-web/vendor/scripts/theme/bootstrap-ambari.js
----------------------------------------------------------------------
diff --git a/ambari-web/vendor/scripts/theme/bootstrap-ambari.js b/ambari-web/vendor/scripts/theme/bootstrap-ambari.js
index 77973a9..1c81aeb 100644
--- a/ambari-web/vendor/scripts/theme/bootstrap-ambari.js
+++ b/ambari-web/vendor/scripts/theme/bootstrap-ambari.js
@@ -126,6 +126,7 @@
       });
       $moreActions.on('click', function () {
         if (settings.fitHeight) {
+          // set actions submenu position
           var $moreIcon = $(this);
           $moreIcon.children('.dropdown-menu').css('position', 'fixed');
           var offset = $moreIcon.offset();
@@ -133,8 +134,10 @@
           $moreIcon.children('.dropdown-menu').css('left', offset.left);
         }
       });
-      $moreActions.children('.dropdown-menu').mouseleave(function () {
-        $(this).parent().removeClass('open');
+      $moreActions.children('.dropdown-menu').on('click', function () {
+        // some action was triggered, should hide this icon
+        var moreIcon = $(this).parent();
+        setTimeout(function(){ moreIcon.hide(); }, 1000);
       });
       $navigationContainer.children('.side-nav-menu').scroll(function () {
         $moreActions.removeClass('open');


[23/50] ambari git commit: AMBARI-20009. HiveView2.0: Table list under DB panel on Query tab appears only if clicked on db name and Not clear how to set the database (Venkata Sairam via pallavkul)

Posted by nc...@apache.org.
AMBARI-20009. HiveView2.0: Table list under DB panel on Query tab appears only if clicked on db name and Not clear how to set the database (Venkata Sairam via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7cc2a205
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7cc2a205
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7cc2a205

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 7cc2a20525881b329abe50d98ce871b6c9753742
Parents: 569f566
Author: pallavkul <pa...@gmail.com>
Authored: Wed Feb 22 19:11:22 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Wed Feb 22 19:11:22 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/routes/queries/new.js |  1 -
 .../resources/ui/app/routes/queries/query.js    | 43 +++++++++++++-------
 2 files changed, 29 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7cc2a205/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
index 5a869c2..7bd2214 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
@@ -27,7 +27,6 @@ export default Ember.Route.extend({
       id: newWorksheetName,
       title: newWorksheetTitle,
       //query: 'select 1;',
-      selectedDb : 'default',
       //owner: 'admin',
       selected: true
     });

http://git-wip-us.apache.org/repos/asf/ambari/blob/7cc2a205/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 7860af1..64f38ab 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -75,6 +75,7 @@ export default Ember.Route.extend(UILoggerMixin, {
     } else {
       this.transitionTo('queries.query' + lastResultRoute);
     }
+    return dbmodel;
   },
 
   model(params) {
@@ -93,24 +94,22 @@ export default Ember.Route.extend(UILoggerMixin, {
     this._super(...arguments);
     this.get("tezViewInfo").getTezViewInfo();
 
-    let self = this;
-    let alldatabases = this.store.findAll('database');
+    let self = this, selectedDb;
+    let alldatabases = this.store.peekAll('database');
     controller.set('alldatabases',alldatabases);
 
-    let selecteDBName = model.get('selectedDb');
+    selectedDb = this.checkIfDeafultDatabaseExists(alldatabases);
 
     let selectedTablesModels =[];
     let selectedMultiDb = [];
-
-    selectedTablesModels.pushObject(
-      {
-        'dbname': selecteDBName ,
-        'tables': this.store.query('table', {databaseId: selecteDBName}),
+    if(selectedDb) {
+      selectedTablesModels.pushObject({
+        'dbname': selectedDb ,
+        'tables': this.store.query('table', {databaseId: selectedDb}),
         'isSelected': true
-      }
-    )
-
-    selectedMultiDb.pushObject(selecteDBName);
+      })
+      selectedMultiDb.pushObject(selectedDb);
+    }
 
     controller.set('worksheet', model);
 
@@ -139,7 +138,23 @@ export default Ember.Route.extend(UILoggerMixin, {
     controller.set('tabs', tabs);
 
   },
-
+  checkIfDeafultDatabaseExists(alldatabases){
+    let defaultDB = alldatabases.findBy('name', 'default'), selectedDb;
+    if(defaultDB) {
+      selectedDb = defaultDB.get("name");
+      this.get('controller.model').set('selectedDb', selectedDb);
+    }
+    return selectedDb;
+  },
+  setSelectedDB(selectedDBs) {
+    let selectedDb = this.get('controller.model').get('selectedDb');
+    if(selectedDBs && selectedDBs.indexOf(selectedDb) === -1) {
+      this.get('controller.model').set('selectedDb', selectedDBs[0]);
+    }
+    else if(selectedDBs.length === 0) {
+      this.get('controller.model').set('selectedDb', null);
+    }
+  },
   actions: {
 
     resetDefaultWorksheet(){
@@ -161,7 +176,7 @@ export default Ember.Route.extend(UILoggerMixin, {
       let self = this;
       let selectedTablesModels =[];
       let selectedMultiDb = [];
-
+      this.setSelectedDB(selectedDBs);
       selectedDBs.forEach(function(db, index){
         selectedTablesModels.pushObject(
           {
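
The route change above derives the initial database from the records already in the store (peekAll), prefers a database literally named "default", and replaces or clears the selection when it is no longer among the databases the user has chosen. The same fallback logic, written out as a small illustrative Java sketch under those assumptions (method and variable names are placeholders):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class DefaultDbSelection {

  // Initial choice: "default" if it exists, otherwise no selection.
  public static String initialSelection(List<String> databases) {
    return databases.contains("default") ? "default" : null;
  }

  // Keep the current selection while it is still among the chosen databases;
  // otherwise fall back to the first chosen one, or to nothing at all.
  public static String reconcile(String current, List<String> selectedDbs) {
    if (selectedDbs.isEmpty()) {
      return null;
    }
    if (current != null && selectedDbs.contains(current)) {
      return current;
    }
    return selectedDbs.get(0);
  }

  public static void main(String[] args) {
    System.out.println(initialSelection(Arrays.asList("default", "sales"))); // default
    System.out.println(reconcile("sales", Arrays.asList("hr", "finance")));  // hr
    System.out.println(reconcile("hr", Collections.<String>emptyList()));    // null
  }
}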


[11/50] ambari git commit: AMBARI-20087. HiveView 2.0: Table search is case sensitive, show table only if name match exactly. (dipayanb)

Posted by nc...@apache.org.
AMBARI-20087. HiveView 2.0: Table search is case sensitive, show table only if name match exactly. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c655d7c0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c655d7c0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c655d7c0

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: c655d7c08edd82fa5de1a009a7af399dd4b09eb7
Parents: 4e7bf34
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Wed Feb 22 12:55:09 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Wed Feb 22 12:55:36 2017 +0530

----------------------------------------------------------------------
 .../hive20/src/main/resources/ui/app/components/list-filter.js  | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c655d7c0/contrib/views/hive20/src/main/resources/ui/app/components/list-filter.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/list-filter.js b/contrib/views/hive20/src/main/resources/ui/app/components/list-filter.js
index d538aa3..db0b5c0 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/list-filter.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/list-filter.js
@@ -22,6 +22,7 @@ export default Ember.Component.extend({
   classNames: ['list-filter'],
   header: '',
   subHeader: '',
+  caseInsensitive: true,
   items: [],
   filterText: '',
   emptyFilterText: Ember.computed('filterText', function() {
@@ -29,7 +30,9 @@ export default Ember.Component.extend({
   }),
   filteredItems: Ember.computed('filterText', 'items.@each', function() {
     return this.get('items').filter((item) => {
-      return item.get('name').indexOf(this.get('filterText')) !== -1;
+      let filterText = this.get('caseInsensitive') ? this.get('filterText').toLowerCase() : this.get('filterText');
+      let itemName = this.get('caseInsensitive') ? item.get('name').toLowerCase() : item.get('name')
+      return itemName.indexOf(filterText) !== -1;
     });
   }),
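
The one-line fix above lowercases both the filter text and the item name before the indexOf check, guarded by a caseInsensitive flag that defaults to true. For illustration only, the equivalent contains-style match as a tiny Java helper (names are placeholders, not part of the view code):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;

public class ListFilterExample {

  public static List<String> filter(List<String> names, String filterText, boolean caseInsensitive) {
    String needle = caseInsensitive ? filterText.toLowerCase(Locale.ROOT) : filterText;
    List<String> matches = new ArrayList<String>();
    for (String name : names) {
      String haystack = caseInsensitive ? name.toLowerCase(Locale.ROOT) : name;
      if (haystack.indexOf(needle) != -1) {  // same substring match as the Ember computed property
        matches.add(name);
      }
    }
    return matches;
  }

  public static void main(String[] args) {
    List<String> tables = Arrays.asList("Customers", "orders", "ORDER_ITEMS");
    System.out.println(filter(tables, "order", true));  // [orders, ORDER_ITEMS]
    System.out.println(filter(tables, "order", false)); // [orders]
  }
}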
 


[20/50] ambari git commit: AMBARI-20062. StackAdvisor reports error for missing YARN, MR, Hive queues while adding services post upgrade (dgrinenko via dlysnichenko)

Posted by nc...@apache.org.
AMBARI-20062. StackAdvisor reports error for missing YARN, MR, Hive queues while adding services post upgrade (dgrinenko via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b789b7c1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b789b7c1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b789b7c1

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b789b7c102c164b3fab3275cebf33148a5755ee0
Parents: 309dbd7
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Wed Feb 22 13:52:17 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Wed Feb 22 13:52:17 2017 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  | 29 ++++++++++++++
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml | 30 +++++++++++++++
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |  8 ++++
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  | 29 ++++++++++++++
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml | 32 ++++++++++++++++
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     | 11 ++++++
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  | 40 ++++++++++++++++++++
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml | 32 +++++++++++++++-
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |  8 ++++
 .../src/main/resources/stacks/stack_advisor.py  |  2 +-
 .../stacks/2.2/common/test_stack_advisor.py     | 20 +++-------
 .../stacks/2.5/common/test_stack_advisor.py     |  5 +--
 12 files changed, 226 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index cdd701a..8589e2d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -96,6 +96,10 @@
             <type>tez-site</type>
             <set key="tez.lib.uris" value="/hdp/apps/${hdp.version}/tez/tez.tar.gz"/>
           </definition>
+          <definition xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name">
+            <type>tez-site</type>
+            <set key="tez.queue.name" value="default" if-type="tez-site" if-key="tez.queue.name" if-key-state="absent"/>
+          </definition>
         </changes>
       </component>
     </service>
@@ -230,6 +234,12 @@
             <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxFileSize = {{webhcat_log_maxfilesize}}MB"/>
             <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxBackupIndex = {{webhcat_log_maxbackupindex}}"/>
           </definition>
+
+          <definition xsi:type="configure" id="hdp_2_6_0_0_templeton_hadoop_queue_name">
+            <type>webhcat-site</type>
+            <set key="templeton.hadoop.queue.name" value="default" if-type="webhcat-site" if-key="templeton.hadoop.queue.name" if-key-state="absent"/>
+          </definition>
+
         </changes>
       </component>
     </service>
@@ -423,6 +433,10 @@
             <type>spark-javaopts-properties</type>
             <transfer operation="delete" delete-key="content" />
           </definition>
+          <definition xsi:type="configure" id="hdp_2_5_0_0_spark_yarn_queue">
+            <type>spark-defaults</type>
+            <set key="spark.yarn.queue" value="default" if-type="spark-defaults" if-key="spark.yarn.queue" if-key-state="absent"/>
+          </definition>
         </changes>
       </component>
       <component name="SPARK_THRIFTSERVER">
@@ -530,6 +544,21 @@
                       to-key="yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled"
                       default-value="false"/>
           </definition>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name">
+            <type>yarn-env</type>
+            <set key="service_check.queue.name" value="default" if-type="yarn-env" if-key="service_check.queue.name" if-key-state="absent"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="MAPREDUCE2">
+      <component name="MAPREDUCE2_CLIENT">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename">
+            <type>mapred-site</type>
+            <set key="mapreduce.job.queuename" value="default" if-type="mapred-site" if-key="mapreduce.job.queuename" if-key-state="absent"/>
+          </definition>
         </changes>
       </component>
     </service>
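
Each of the new definitions above carries if-type/if-key/if-key-state="absent" guards, so the default queue name is written only when the property is missing; a queue an operator has already customized is left untouched by the upgrade. A tiny sketch of that guard semantics (illustrative Python, not the upgrade engine's implementation; the function name is made up):

# Mirrors <set ... if-key-state="absent">: write the default only when the key is missing.
def apply_if_absent(config, key, value="default"):
    if key not in config:
        config[key] = value
    return config

print(apply_if_absent({}, "tez.queue.name"))                          # {'tez.queue.name': 'default'}
print(apply_if_absent({"tez.queue.name": "etl"}, "tez.queue.name"))   # customized value preserved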

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index ff42022..d675986 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -326,12 +326,24 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Resource Manager">
+        <task xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name">
+          <summary>Adding service check customization property</summary>
+        </task>
+      </execute-stage>
+
       <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Apply config changes for Mapreduce2 client">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">
           <summary>Verifying LZO codec path for mapreduce</summary>
         </task>
       </execute-stage>
 
+      <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Apply config changes for Mapreduce2 client">
+        <task xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename">
+          <summary>Adding queue customization property</summary>
+        </task>
+      </execute-stage>
+
       <!-- HBASE -->
       <!-- These HBASE configs changed in HDP 2.3.4.0, but Ambari can't distinguish HDP 2.3.2.0 vs HDP 2.3.4.0, so easier to always do them. -->
       <execute-stage service="HBASE" component="HBASE_MASTER" title="Apply config changes for HBASE Master">
@@ -366,6 +378,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Apply config changes for Tez">
+        <task xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name">
+          <summary>Add queue customization property</summary>
+        </task>
+      </execute-stage>
+
       <!-- SQOOP -->
       <execute-stage service="SQOOP" component="SQOOP" title="Apply config changes for Sqoop to remove Atlas Configs">
         <!-- Remove Atlas configs that were incorrectly added to sqoop-site instead of Atlas' application.properties. -->
@@ -388,6 +406,12 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_webhcat_server_update_configuration_paths"/>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Apply config changes for WebHCat Server">
+        <task xsi:type="configure" id="hdp_2_6_0_0_templeton_hadoop_queue_name">
+          <summary>Adding queue customization setting</summary>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Parameterizing WebHCat Log4J Properties">
         <task xsi:type="configure" id="webhcat_log4j_parameterize">
           <summary>Updating the Webhcat Log4J properties to include parameterizations</summary>
@@ -484,6 +508,12 @@
         <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
       </execute-stage>
 
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Apply config changes for Spark">
+        <task xsi:type="configure" id="hdp_2_5_0_0_spark_yarn_queue">
+          <summary>Add queue customization property</summary>
+        </task>
+      </execute-stage>
+
       <!-- RANGER -->
       <execute-stage service="RANGER" component="RANGER_ADMIN" title="Apply config changes for Ranger Admin">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_audit_db_flag"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index b255bc0..9917ee1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -737,6 +737,7 @@
           <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">
             <summary>Verifying LZO codec path for mapreduce</summary>
           </task>
+          <task xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename"/>
         </pre-upgrade>
         
         <pre-downgrade copy-upgrade="true" />
@@ -765,6 +766,7 @@
           <task xsi:type="configure" id="yarn_log4j_parameterize" />
           <task xsi:type="configure" id="yarn_env_security_opts" />
           <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name"/>
         </pre-upgrade>
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
 
@@ -829,6 +831,10 @@
 
     <service name="TEZ">
       <component name="TEZ_CLIENT">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name"/>
+        </pre-upgrade>
+        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -908,6 +914,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_5_0_0_webhcat_server_update_configuration_paths"/>
           <task xsi:type="configure" id="webhcat_log4j_parameterize" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_templeton_hadoop_queue_name"/>
         </pre-upgrade>
         
         <pre-downgrade />
@@ -967,6 +974,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_remove_spark_properties_extraJavaOptions"/>
           <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
+          <task xsi:type="configure" id="hdp_2_5_0_0_spark_yarn_queue"/>
         </pre-upgrade>
 
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 1bea263..14feab6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -31,6 +31,11 @@
             <set key="tez.lib.uris" value="/hdp/apps/${hdp.version}/tez/tez.tar.gz"/>
           </definition>
 
+          <definition xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name">
+            <type>tez-site</type>
+            <set key="tez.queue.name" value="default" if-type="tez-site" if-key="tez.queue.name" if-key-state="absent"/>
+          </definition>
+
         </changes>
       </component>
     </service>
@@ -121,6 +126,10 @@
             <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxFileSize = {{webhcat_log_maxfilesize}}MB"/>
             <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxBackupIndex = {{webhcat_log_maxbackupindex}}"/>
           </definition>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_templeton_hadoop_queue_name">
+            <type>webhcat-site</type>
+            <set key="templeton.hadoop.queue.name" value="default" if-type="webhcat-site" if-key="templeton.hadoop.queue.name" if-key-state="absent"/>
+          </definition>
         </changes>
       </component>
     </service>
@@ -316,6 +325,10 @@
                       to-key="yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled"
                       default-value="false"/>
           </definition>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name">
+            <type>yarn-env</type>
+            <set key="service_check.queue.name" value="default" if-type="yarn-env" if-key="service_check.queue.name" if-key-state="absent"/>
+          </definition>
         </changes>
       </component>
 
@@ -330,6 +343,17 @@
       </component>
     </service>
 
+    <service name="MAPREDUCE2">
+      <component name="MAPREDUCE2_CLIENT">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename">
+            <type>mapred-site</type>
+            <set key="mapreduce.job.queuename" value="default" if-type="mapred-site" if-key="mapreduce.job.queuename" if-key-state="absent"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
     <service name="KAFKA">
       <component name="KAFKA_BROKER">
         <changes>
@@ -564,6 +588,11 @@
             <type>spark-defaults</type>
             <transfer operation="delete" delete-key="spark.yarn.max.executor.failures" />
           </definition>
+
+          <definition xsi:type="configure" id="hdp_2_5_0_0_spark_yarn_queue">
+            <type>spark-defaults</type>
+            <set key="spark.yarn.queue" value="default" if-type="spark-defaults" if-key="spark.yarn.queue" if-key-state="absent"/>
+          </definition>
         </changes>
       </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index 86cd56a..e856288 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -301,6 +301,18 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Resource Manager">
+        <task xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name">
+          <summary>Adding service check customization property</summary>
+        </task>
+      </execute-stage>
+
+      <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Apply config changes for Mapreduce2 client">
+        <task xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename">
+          <summary>Adding queue customization property</summary>
+        </task>
+      </execute-stage>
+
       <!--TEZ-->
       <execute-stage service="TEZ" component="TEZ_CLIENT" title="Verify LZO codec path for Tez">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">
@@ -312,6 +324,13 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_tez_client_adjust_tez_lib_uris_property"/>
       </execute-stage>
 
+
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Apply config changes for Tez">
+        <task xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name">
+          <summary>Add queue customization property</summary>
+        </task>
+      </execute-stage>
+
       <!--OOZIE-->
       <execute-stage service="OOZIE" component="OOZIE_SERVER" title="Apply config changes for Oozie Server">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.OozieConfigCalculation">
@@ -422,6 +441,13 @@
         </task>
       </execute-stage>
 
+      <!--SPARK-->
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Apply config changes for Spark">
+        <task xsi:type="configure" id="hdp_2_5_0_0_spark_yarn_queue">
+          <summary>Add queue customization property</summary>
+        </task>
+      </execute-stage>
+
       <!-- SQOOP -->
       <execute-stage service="SQOOP" component="SQOOP" title="Apply config changes for Sqoop to remove Atlas Configs">
         <!-- Remove Atlas configs that were incorrectly added to sqoop-site instead of Atlas' application.properties. -->
@@ -473,6 +499,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Apply config changes for WebHCat Server">
+        <task xsi:type="configure" id="hdp_2_6_0_0_templeton_hadoop_queue_name">
+          <summary>Adding queue customization setting</summary>
+        </task>
+      </execute-stage>
+
       <!-- HBASE -->
       <execute-stage service="HBASE" component="HBASE_MASTER" title="Apply config changes for Hbase Master">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hbase_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index 6a8e9d7..f736796 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -742,6 +742,7 @@
           <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">
             <summary>Verifying LZO codec path for mapreduce</summary>
           </task>
+          <task xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename"/>
         </pre-upgrade>
         
         <pre-downgrade copy-upgrade="true" />
@@ -770,6 +771,7 @@
           <task xsi:type="configure" id="yarn_log4j_parameterize" />
           <task xsi:type="configure" id="yarn_env_security_opts" />
           <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name"/>
         </pre-upgrade>
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
 
@@ -832,6 +834,10 @@
 
     <service name="TEZ">
       <component name="TEZ_CLIENT">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name"/>
+        </pre-upgrade>
+        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -906,6 +912,7 @@
       <component name="WEBHCAT_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="webhcat_log4j_parameterize" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_templeton_hadoop_queue_name"/>
         </pre-upgrade>
         <pre-downgrade/>
         <upgrade>
@@ -949,6 +956,10 @@
         </upgrade>
       </component>
       <component name="SPARK_CLIENT">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_5_0_0_spark_yarn_queue"/>
+        </pre-upgrade>
+        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index ca9cf47..3069b34 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -84,7 +84,27 @@
           </definition>
         </changes>
       </component>
+      <component name="SPARK_CLIENT">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_5_0_0_spark_yarn_queue">
+            <type>spark-defaults</type>
+            <set key="spark.yarn.queue" value="default" if-type="spark-defaults" if-key="spark.yarn.queue" if-key-state="absent"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="TEZ">
+      <component name="TEZ_CLIENT">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name">
+            <type>tez-site</type>
+            <set key="tez.queue.name" value="default" if-type="tez-site" if-key="tez.queue.name" if-key-state="absent"/>
+          </definition>
+        </changes>
+      </component>
     </service>
+
     <service name="ZOOKEEPER">
           <component name="ZOOKEEPER_SERVER">
             <changes>
@@ -151,9 +171,25 @@
           <set key="yarn.nodemanager.log-aggregation.num-log-files-per-app"
                value="336" />
         </definition>
+        <definition xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name">
+          <type>yarn-env</type>
+          <set key="service_check.queue.name" value="default" if-type="yarn-env" if-key="service_check.queue.name" if-key-state="absent"/>
+        </definition>
       </changes>
     </component>
   </service>
+
+  <service name="MAPREDUCE2">
+    <component name="MAPREDUCE2_CLIENT">
+      <changes>
+        <definition xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename">
+          <type>mapred-site</type>
+          <set key="mapreduce.job.queuename" value="default" if-type="mapred-site" if-key="mapreduce.job.queuename" if-key-state="absent"/>
+        </definition>
+      </changes>
+    </component>
+  </service>
+
   <service name="HDFS">
     <component name="NAMENODE">
       <changes>
@@ -409,6 +445,10 @@
             <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxFileSize = {{webhcat_log_maxfilesize}}MB"/>
             <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxBackupIndex = {{webhcat_log_maxbackupindex}}"/>
           </definition>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_templeton_hadoop_queue_name">
+            <type>webhcat-site</type>
+            <set key="templeton.hadoop.queue.name" value="default" if-type="webhcat-site" if-key="templeton.hadoop.queue.name" if-key-state="absent"/>
+          </definition>
         </changes>
       </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index 06cae78..9516b6a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -281,6 +281,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Apply config changes for Mapreduce2 client">
+        <task xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename">
+          <summary>Adding queue customization property</summary>
+        </task>
+      </execute-stage>
+
       <!--Yarn-->
       <execute-stage service="YARN" component="RESOURCEMANAGER" title="Parameterizing Yarn Log4J Properties Resource Manager">
         <task xsi:type="configure" id="yarn_log4j_parameterize">
@@ -302,6 +308,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for Resource Manager">
+        <task xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name">
+          <summary>Adding service check customization property</summary>
+        </task>
+      </execute-stage>
+
       <!--Yarn-->
       <execute-stage service="YARN" component="RESOURCEMANAGER" title="Apply config changes for YARN app log retaintion">
         <task xsi:type="configure" id="yarn_site_retained_log_count">
@@ -316,6 +328,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Update Tez queue configuration">
+        <task xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name">
+          <summary>Add queue customization property</summary>
+        </task>
+      </execute-stage>
+
       <!--OOZIE-->
       <execute-stage service="OOZIE" component="OOZIE_SERVER" title="Apply config changes for Oozie Server">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.OozieConfigCalculation">
@@ -466,6 +484,12 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Apply config changes for WebHCat Server">
+        <task xsi:type="configure" id="hdp_2_6_0_0_templeton_hadoop_queue_name">
+          <summary>Adding queue customization setting</summary>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HIVE" component="HIVE_SERVER" title="Appending heap dump options for Hive">
         <task xsi:type="configure" id="hdp_2_6_0_0_hive_append_heap_dump_options"/>
       </execute-stage>
@@ -507,7 +531,13 @@
       <!-- SPARK -->
       <execute-stage service="SPARK" component="LIVY_SERVER" title="Apply config changes for Livy Server">
         <task xsi:type="configure" id="hdp_2_5_0_0_rename_spark_livy_configs"/>
-      </execute-stage>      
+      </execute-stage>
+
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Apply config changes for Spark">
+        <task xsi:type="configure" id="hdp_2_5_0_0_spark_yarn_queue">
+          <summary>Add queue customization property</summary>
+        </task>
+      </execute-stage>
     </group>
 
     <!--

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index 64edbb8..04d603b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -650,6 +650,7 @@
           <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">
             <summary>Verifying LZO codec path for mapreduce</summary>
           </task>
+          <task xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename"/>
         </pre-upgrade>
         
         <pre-downgrade copy-upgrade="true" />
@@ -673,6 +674,7 @@
           <task xsi:type="configure" id="yarn_env_security_opts" />
           <task xsi:type="configure" id="hdp_2_6_0_0_yarn_priority_utilization_underutilized_preemption" />
           <task xsi:type="configure" id="yarn_site_retained_log_count" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name"/>
         </pre-upgrade>
         <pre-downgrade />
         <upgrade>
@@ -729,6 +731,7 @@
           <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">
             <summary>Verifying LZO codec path for Tez</summary>
           </task>
+          <task xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name"/>
         </pre-upgrade>
         
         <pre-downgrade copy-upgrade="true" />
@@ -811,6 +814,7 @@
       <component name="WEBHCAT_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="webhcat_log4j_parameterize" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_templeton_hadoop_queue_name"/>
         </pre-upgrade>
         <pre-downgrade/>
         <upgrade>
@@ -854,6 +858,10 @@
         </upgrade>
       </component>
       <component name="SPARK_CLIENT">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_5_0_0_spark_yarn_queue"/>
+        </pre-upgrade>
+        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/main/resources/stacks/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/stack_advisor.py b/ambari-server/src/main/resources/stacks/stack_advisor.py
index abcd762..04c6baf 100644
--- a/ambari-server/src/main/resources/stacks/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/stack_advisor.py
@@ -2526,7 +2526,7 @@ class DefaultStackAdvisor(StackAdvisor):
   #region YARN and MAPREDUCE
   def validatorYarnQueue(self, properties, recommendedDefaults, propertyName, services):
     if propertyName not in properties:
-      return self.getErrorItem("Value should be set")
+      return None
 
     capacity_scheduler_properties, _ = self.getCapacitySchedulerProperties(services)
     leaf_queue_names = self.getAllYarnLeafQueues(capacity_scheduler_properties)
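
The behavioural change is in validatorYarnQueue: a queue property that is simply absent is no longer flagged as an error, while a property that is present but points at a non-existent leaf queue still is. A simplified standalone sketch of that rule (the capacity-scheduler lookup and the error-item shape are stubbed here and are not Ambari's actual API):

def validate_yarn_queue(properties, property_name, leaf_queue_names):
    """Return an error item only when the queue property is present but invalid."""
    if property_name not in properties:
        # Absent property: no longer an error; the config-upgrade definitions add the default later.
        return None
    queue = properties[property_name]
    if queue not in leaf_queue_names:
        return {"config-name": property_name,
                "level": "ERROR",
                "message": "Queue %s does not exist in capacity-scheduler" % queue}
    return None

print(validate_yarn_queue({}, "tez.queue.name", {"default"}))                # None
print(validate_yarn_queue({"tez.queue.name": "etl"}, "tez.queue.name",
                          {"default"}))                                      # error item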

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index b8762b3..36936d5 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -3488,12 +3488,7 @@ class TestHDP22StackAdvisor(TestCase):
                   'tez.tez-ui.history-url.base' : 'http://host:8080/#/main/views/TEZ/0.7.0.2.3.0.0-2155/TEZ_CLUSTER_INSTANCE'}
 
 
-    res_expected = [{'config-name': 'tez.queue.name',
-                     'config-type': 'tez-site',
-                     'level': 'ERROR',
-                     'message': 'Value should be set',
-                     'type': 'configuration'},
-                    {'config-name': 'tez.tez-ui.history-url.base',
+    res_expected = [{'config-name': 'tez.tez-ui.history-url.base',
                      'config-type': 'tez-site',
                      'level': 'WARN',
                      'message': "It is recommended to set value https://host:8443/#/main/views/TEZ/0.7.0.2.3.0.0-2155/TEZ_CLUSTER_INSTANCE for property tez.tez-ui.history-url.base",
@@ -3510,7 +3505,7 @@ class TestHDP22StackAdvisor(TestCase):
                      'type': 'configuration'}]
 
     res = self.stackAdvisor.validateTezConfigurations(properties, recommendedDefaults, configurations, '', '')
-    self.assertEquals(res, res_expected)
+    self.assertEquals(res_expected, res)
 
 
   def test_validateHDFSConfigurationsEnv(self):
@@ -3622,11 +3617,6 @@ class TestHDP22StackAdvisor(TestCase):
                      'type': 'configuration',
                      'config-name': 'yarn.app.mapreduce.am.command-opts',
                      'level': 'WARN'},
-                    {'config-name': 'mapreduce.job.queuename',
-                     'config-type': 'mapred-site',
-                     'level': 'ERROR',
-                     'message': 'Value should be set',
-                     'type': 'configuration'},
                     {'config-type': 'mapred-site',
                      'message': 'yarn.app.mapreduce.am.command-opts Xmx should be less than yarn.app.mapreduce.am.resource.mb (410)',
                      'type': 'configuration',
@@ -3634,7 +3624,7 @@ class TestHDP22StackAdvisor(TestCase):
                      'level': 'WARN'}]
 
     res = self.stackAdvisor.validateMapReduce2Configurations(properties, recommendedDefaults, {}, '', '')
-    self.assertEquals(res, res_expected)
+    self.assertEquals(res_expected, res)
 
   def test_validateHiveConfigurationsEnv(self):
     properties = {"hive_security_authorization": "None"}
@@ -4320,7 +4310,7 @@ class TestHDP22StackAdvisor(TestCase):
     }
 
     # Test with ranger plugin enabled, validation fails
-    res_expected = [{'config-type': 'spark-defaults', 'message': 'Value should be set', 'type': 'configuration', 'config-name': 'spark.yarn.queue', 'level': 'ERROR'}]
+    res_expected = []
 
     res = self.stackAdvisor.validateSparkDefaults(properties, recommendedDefaults, configurations, services, {})
-    self.assertEquals(res, res_expected)
+    self.assertEquals(res_expected, res)

http://git-wip-us.apache.org/repos/asf/ambari/blob/b789b7c1/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index 176dd99..6890ef6 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -5388,11 +5388,10 @@ class TestHDP25StackAdvisor(TestCase):
         ]
     }
 
-    # Test with ranger plugin enabled, validation fails
-    res_expected = [{'config-type': 'spark2-defaults', 'message': 'Value should be set', 'type': 'configuration', 'config-name': 'spark.yarn.queue', 'level': 'ERROR'}]
+    res_expected = []
 
     res = self.stackAdvisor.validateSpark2Defaults(properties, recommendedDefaults, configurations, services, {})
-    self.assertEquals(res, res_expected)
+    self.assertEquals(res_expected, res)
 
 
   def test_recommendOozieConfigurations_noFalconServer(self):


[34/50] ambari git commit: AMBARI-20124. Remove OrderedDict Python module code from the Ranger stack (oleewere)

Posted by nc...@apache.org.
AMBARI-20124. Remove OrderedDict Python module code from the Ranger stack (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/11287328
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/11287328
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/11287328

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 112873287bd386dca5c54f8559c7b8c8a2701471
Parents: 9c6c20f
Author: oleewere <ol...@gmail.com>
Authored: Thu Feb 23 10:37:14 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Thu Feb 23 10:37:32 2017 +0100

----------------------------------------------------------------------
 .../RANGER/0.4.0/package/scripts/setup_ranger_xml.py   | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/11287328/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
index b3eb919..15f7cf7 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
@@ -19,7 +19,6 @@ limitations under the License.
 """
 import os
 import re
-from collections import OrderedDict
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions.default import default
 from resource_management.core.logger import Logger
@@ -721,8 +720,8 @@ def setup_ranger_audit_solr():
     solr_cloud_util.add_solr_roles(params.config,
                                    roles = [params.infra_solr_role_ranger_admin, params.infra_solr_role_ranger_audit, params.infra_solr_role_dev],
                                    new_service_principals = [params.ranger_admin_jaas_principal])
-    service_default_principals_map = OrderedDict([('hdfs', 'nn'), ('hbase', 'hbase'), ('hive', 'hive'), ('kafka', 'kafka'), ('kms', 'rangerkms'),
-                                                  ('knox', 'knox'), ('nifi', 'nifi'), ('storm', 'storm'), ('yanr', 'yarn')])
+    service_default_principals_map = [('hdfs', 'nn'), ('hbase', 'hbase'), ('hive', 'hive'), ('kafka', 'kafka'), ('kms', 'rangerkms'),
+                                                  ('knox', 'knox'), ('nifi', 'nifi'), ('storm', 'storm'), ('yanr', 'yarn')]
     service_principals = get_ranger_plugin_principals(service_default_principals_map)
     solr_cloud_util.add_solr_roles(params.config,
                                    roles = [params.infra_solr_role_ranger_audit, params.infra_solr_role_dev],
@@ -767,17 +766,17 @@ def secure_znode(znode, jaasFile):
                                jaas_file=jaasFile,
                                java64_home=params.java_home, sasl_users=[params.ranger_admin_jaas_principal])
 
-def get_ranger_plugin_principals(services_defaults_map):
+def get_ranger_plugin_principals(services_defaults_tuple_list):
   """
   Get ranger plugin user principals from service-default value maps using ranger-*-audit configurations
   """
   import params
   user_principals = []
-  if len(services_defaults_map) < 1:
+  if len(services_defaults_tuple_list) < 1:
     raise Exception("Services - defaults map parameter is missing.")
 
-  for key, default_value in services_defaults_map.iteritems():
-    user_principal = default(format("configurations/ranger-{key}-audit/xasecure.audit.jaas.Client.option.principal"), default_value)
+  for (service, default_value) in services_defaults_tuple_list:
+    user_principal = default(format("configurations/ranger-{service}-audit/xasecure.audit.jaas.Client.option.principal"), default_value)
     user_principals.append(user_principal)
   return user_principals
 

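The plain list of tuples keeps the same iteration order the OrderedDict gave, without the import. A minimal standalone sketch of the resulting lookup (a flat configurations dict stands in for Ambari's default()/format() helpers, which are not reproduced here):

def get_ranger_plugin_principals(services_defaults_tuple_list, configurations):
    """Resolve each plugin's audit principal, falling back to the default user name."""
    if len(services_defaults_tuple_list) < 1:
        raise Exception("Services - defaults map parameter is missing.")
    user_principals = []
    for service, default_value in services_defaults_tuple_list:
        key = "configurations/ranger-%s-audit/xasecure.audit.jaas.Client.option.principal" % service
        user_principals.append(configurations.get(key, default_value))
    return user_principals

print(get_ranger_plugin_principals(
    [('hdfs', 'nn'), ('hbase', 'hbase')],
    {"configurations/ranger-hdfs-audit/xasecure.audit.jaas.Client.option.principal": "nn/_HOST@EXAMPLE.COM"}))
# ['nn/_HOST@EXAMPLE.COM', 'hbase']
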

[45/50] ambari git commit: AMBARI-20050. Issue while importing workflow with insufficient permissions. (Madhan Mohan Reddy via gauravn7)

Posted by nc...@apache.org.
AMBARI-20050. Issue while importing workflow with insufficient permissions. (Madhan Mohan Reddy via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9fdeec1a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9fdeec1a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9fdeec1a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 9fdeec1afecf1983dfdb633bca0ab07b15c6d24c
Parents: bb7b83f
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 23 19:24:00 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 23 19:24:31 2017 +0530

----------------------------------------------------------------------
 .../apache/oozie/ambari/view/OozieDelegate.java |  12 +-
 .../ambari/view/OozieProxyImpersonator.java     | 381 +++++++------------
 .../oozie/ambari/view/assets/AssetResource.java |  93 +++--
 .../oozie/ambari/view/exception/ErrorCode.java  |  58 +++
 .../ambari/view/exception/WfmException.java     |  46 +++
 .../ambari/view/exception/WfmWebException.java  | 115 ++++++
 .../WorkflowsManagerResource.java               |  36 +-
 .../ui/app/components/bundle-config.js          |   8 +-
 .../resources/ui/app/components/coord-config.js |   6 +-
 .../ui/app/components/designer-errors.js        |  49 +++
 .../ui/app/components/flow-designer.js          |  69 +---
 .../ui/app/components/stack-trace-dialog.js     |  26 ++
 .../src/main/resources/ui/app/styles/app.less   |  13 +-
 .../app/templates/components/bundle-config.hbs  |   4 +-
 .../app/templates/components/coord-config.hbs   |   2 +-
 .../templates/components/designer-errors.hbs    |  17 +
 .../app/templates/components/flow-designer.hbs  |  18 +-
 .../templates/components/stack-trace-dialog.hbs |  33 ++
 .../components/stack-trace-dialog-test.js       |  40 ++
 19 files changed, 648 insertions(+), 378 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
index 55c4312..6f3c4d2 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
@@ -32,6 +32,8 @@ import javax.ws.rs.core.Response;
 
 import org.apache.ambari.view.ViewContext;
 import org.apache.commons.io.IOUtils;
+import org.apache.oozie.ambari.view.exception.ErrorCode;
+import org.apache.oozie.ambari.view.exception.WfmException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -90,10 +92,16 @@ public class OozieDelegate {
 
     LOGGER.info("Resp from oozie status entity=="
       + serviceResponse.getEntity());
+    String oozieResp=null;
     if (serviceResponse.getEntity() instanceof String) {
-      return (String) serviceResponse.getEntity();
+      oozieResp= (String) serviceResponse.getEntity();
     } else {
-      return "success";
+      oozieResp= serviceResponse.getEntity().toString();
+    }
+    if (oozieResp != null && oozieResp.trim().startsWith("{")) {
+      return  oozieResp;
+    }else{
+      throw new WfmException(oozieResp,ErrorCode.OOZIE_SUBMIT_ERROR);
     }
   }
 

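With this change the delegate returns the Oozie reply only when it looks like a JSON payload; any other body is raised as a submit error instead of being collapsed into the string "success". A rough sketch of that rule (illustrative Python; the exception class and the error-code string simply mirror the Java types in the diff):

class WfmError(Exception):
    """Stand-in for WfmException carrying an error code alongside the message."""
    def __init__(self, message, error_code):
        super(WfmError, self).__init__(message)
        self.error_code = error_code

def handle_oozie_submit_response(entity):
    oozie_resp = entity if isinstance(entity, str) else str(entity)
    if oozie_resp and oozie_resp.strip().startswith("{"):
        return oozie_resp                      # Oozie answered with a JSON document
    raise WfmError(oozie_resp, "error.oozie.submit")

print(handle_oozie_submit_response('{"id": "0000001-170223-oozie-W"}'))
# A plain-text error body now raises WfmError rather than looking like a success.
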
http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
index 6603a9c..c4e5bbd 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
@@ -49,9 +49,10 @@ import javax.ws.rs.core.UriInfo;
 import org.apache.ambari.view.ViewContext;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.oozie.ambari.view.assets.AssetResource;
+import org.apache.oozie.ambari.view.exception.ErrorCode;
+import org.apache.oozie.ambari.view.exception.WfmException;
+import org.apache.oozie.ambari.view.exception.WfmWebException;
 import org.apache.oozie.ambari.view.workflowmanager.WorkflowManagerService;
 import org.apache.oozie.ambari.view.workflowmanager.WorkflowsManagerResource;
 import org.slf4j.Logger;
@@ -83,30 +84,7 @@ public class OozieProxyImpersonator {
   private final OozieUtils oozieUtils = new OozieUtils();
   private final AssetResource assetResource;
 
-  private enum ErrorCodes {
-    OOZIE_SUBMIT_ERROR("error.oozie.submit", "Oozie Submit error"), OOZIE_IO_ERROR(
-      "error.oozie.io", "Oozie I/O error"), FILE_ACCESS_ACL_ERROR(
-      "error.file.access.control",
-      "Access Error to file due to access control"), FILE_ACCESS_UNKNOWN_ERROR(
-      "error.file.access", "Error accessing file"), WORKFLOW_PATH_EXISTS(
-      "error.workflow.path.exists", "Workflow Path exists"), WORKFLOW_XML_DOES_NOT_EXIST(
-      "error.workflow.xml.not.exists", "Workflow Xml does not exist");
-    private String errorCode;
-    private String description;
-
-    ErrorCodes(String errorCode, String description) {
-      this.errorCode = errorCode;
-      this.description = description;
-    }
-
-    public String getErrorCode() {
-      return errorCode;
-    }
 
-    public String getDescription() {
-      return description;
-    }
-  }
   private static enum WorkflowFormat{
     XML("xml"),
     DRAFT("draft");
@@ -139,15 +117,23 @@ public class OozieProxyImpersonator {
   @GET
   @Path("hdfsCheck")
   public Response hdfsCheck(){
-    hdfsFileUtils.hdfsCheck();
-    return Response.ok().build();
+    try {
+      hdfsFileUtils.hdfsCheck();
+      return Response.ok().build();
+    }catch (Exception e){
+      throw new WfmWebException(e);
+    }
   }
 
   @GET
   @Path("homeDirCheck")
   public Response homeDirCheck(){
-    hdfsFileUtils.homeDirCheck();
-    return Response.ok().build();
+    try{
+      hdfsFileUtils.homeDirCheck();
+      return Response.ok().build();
+    }catch (Exception e){
+      throw new WfmWebException(e);
+    }
   }
 
   @Path("/fileServices")
@@ -189,57 +175,89 @@ public class OozieProxyImpersonator {
                             @QueryParam("projectId") String projectId,
                             @DefaultValue("false") @QueryParam("overwrite") Boolean overwrite,
                             @QueryParam("description") String description,
-                            @QueryParam("jobType") String jobType) {
+                            @QueryParam("jobType") String jobTypeString) {
     LOGGER.info("submit workflow job called");
-    return submitJobInternal(postBody, headers, ui, appPath, overwrite,
-      JobType.valueOf(jobType), projectId, description);
+    JobType jobType = JobType.valueOf(jobTypeString);
+    if (StringUtils.isEmpty(appPath)) {
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
+    }
+    appPath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
+    try {
+      if (!overwrite) {
+        boolean fileExists = hdfsFileUtils.fileExists(appPath);
+        if (fileExists) {
+          throw new WfmWebException(ErrorCode.WORKFLOW_PATH_EXISTS);
+        }
+      }
+      postBody = utils.formatXml(postBody);
+
+      String filePath = workflowFilesService.createFile(appPath, postBody, overwrite);
+      LOGGER.info(String.format("submit workflow job done. filePath=[%s]", filePath));
+
+      if (PROJ_MANAGER_ENABLED) {
+        String name = oozieUtils.deduceWorkflowNameFromXml(postBody);
+        workflowManagerService.saveWorkflow(projectId, appPath, jobType,
+          null, viewContext.getUsername(), name);
+      }
+      String response = oozieDelegate.submitWorkflowJobToOozie(headers,
+        appPath, ui.getQueryParameters(), jobType);
+      return Response.status(Status.OK).entity(response).build();
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch(WfmException ex){
+      throw new WfmWebException(ex,ex.getErrorCode());
+    } catch(Exception ex) {
+      throw new WfmWebException(ex);
+    }
   }
 
   @POST
   @Path("/saveWorkflow")
   @Consumes({MediaType.TEXT_PLAIN + "," + MediaType.TEXT_XML})
   public Response saveWorkflow(String postBody, @Context HttpHeaders headers,
-                               @Context UriInfo ui, @QueryParam("app.path") String appPath, @QueryParam("jobType") String jobTypeStr,
+                               @Context UriInfo ui, @QueryParam("app.path") String appPath,
+                               @QueryParam("jobType") String jobTypeStr,
                                @DefaultValue("false") @QueryParam("overwrite") Boolean overwrite) {
     LOGGER.info("save workflow  called");
     if (StringUtils.isEmpty(appPath)) {
-      throw new RuntimeException("app path can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
     JobType jobType = StringUtils.isEmpty(jobTypeStr) ? JobType.WORKFLOW : JobType.valueOf(jobTypeStr);
     String workflowFilePath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
-    if (!overwrite) {
-      boolean fileExists = hdfsFileUtils.fileExists(workflowFilePath);
-      if (fileExists) {
-        return getFileExistsResponse();
-      }
-    }
-
     try {
+      if (!overwrite) {
+        boolean fileExists = hdfsFileUtils.fileExists(workflowFilePath);
+        if (fileExists) {
+          throw new WfmWebException(ErrorCode.WORKFLOW_PATH_EXISTS);
+        }
+      }
       if (utils.isXml(postBody)) {
         saveWorkflowXml(jobType, appPath, postBody, overwrite);
       } else {
         saveDraft(jobType, appPath, postBody, overwrite);
       }
       if (PROJ_MANAGER_ENABLED) {
-        workflowManagerService.saveWorkflow(null, workflowFilePath,
-          jobType, null,
+        workflowManagerService.saveWorkflow(null, workflowFilePath, jobType, null,
           viewContext.getUsername(), getWorkflowName(postBody));
       }
-    } catch (IOException ex) {
-      return getRespCodeForException(ex);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+       throw new WfmWebException(ex);
     }
-
     return Response.ok().build();
   }
-  private String getWorkflowName(String postBody){
+
+  private String getWorkflowName(String postBody) {
     if (utils.isXml(postBody)) {
       return oozieUtils.deduceWorkflowNameFromXml(postBody);
-    }else{
+    } else {
       return oozieUtils.deduceWorkflowNameFromJson(postBody);
     }
   }
 
-  private void saveWorkflowXml(JobType jobType, String appPath, String postBody, Boolean overwrite) throws IOException {
+  private void saveWorkflowXml(JobType jobType, String appPath, String postBody,
+                               Boolean overwrite) throws IOException {
     appPath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
     postBody = utils.formatXml(postBody);
     workflowFilesService.createFile(appPath, postBody, overwrite);
@@ -267,49 +285,47 @@ public class OozieProxyImpersonator {
                                @DefaultValue("false") @QueryParam("overwrite") Boolean overwrite) {
     LOGGER.info("publish asset called");
     if (StringUtils.isEmpty(uploadPath)) {
-      throw new RuntimeException("upload path can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
     uploadPath = uploadPath.trim();
-    Map<String, String> validateAsset = assetResource.validateAsset(headers, postBody,
-      ui.getQueryParameters());
-    if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
-      return Response.status(Status.BAD_REQUEST).entity(
-        validateAsset.get(MESSAGE_KEY)).build();
+    try {
+      Map<String, String> validateAsset = assetResource.validateAsset(headers, postBody,
+        ui.getQueryParameters());
+      if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
+        WfmWebException wfmEx=new WfmWebException(ErrorCode.INVALID_ASSET_INPUT);
+        wfmEx.setAdditionalDetail(validateAsset.get(MESSAGE_KEY));
+        throw wfmEx;
+      }
+      return saveAsset(postBody, uploadPath, overwrite);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
-    return saveAsset(postBody, uploadPath, overwrite);
   }
 
-  private Response saveAsset(String postBody, String uploadPath,
-                             Boolean overwrite) {
+  private Response saveAsset(String postBody, String uploadPath, Boolean overwrite) throws IOException {
     uploadPath = workflowFilesService.getAssetFileName(uploadPath);
     if (!overwrite) {
       boolean fileExists = hdfsFileUtils.fileExists(uploadPath);
       if (fileExists) {
-        return getFileExistsResponse();
+        throw new WfmWebException(ErrorCode.WORKFLOW_PATH_EXISTS);
       }
     }
     postBody = utils.formatXml(postBody);
-    try {
-      String filePath = workflowFilesService.createAssetFile(uploadPath,
-        postBody, overwrite);
-      LOGGER.info(String.format("publish asset job done. filePath=[%s]",
-        filePath));
-      return Response.ok().build();
-    } catch (Exception ex) {
-      LOGGER.error(ex.getMessage(), ex);
-      return getRespCodeForException(ex);
-    }
+    String filePath = workflowFilesService.createAssetFile(uploadPath, postBody, overwrite);
+    LOGGER.info(String.format("publish asset job done. filePath=[%s]", filePath));
+    return Response.ok().build();
   }
+
   @GET
   @Path("/readAsset")
-  public Response readAsset(
-          @QueryParam("assetPath") String assetPath) {
+  public Response readAsset(@QueryParam("assetPath") String assetPath) {
     if (StringUtils.isEmpty(assetPath)) {
-      throw new RuntimeException("assetPath can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
     try {
-      final InputStream is = workflowFilesService
-              .readAssset(assetPath);
+      final InputStream is = workflowFilesService.readAssset(assetPath);
       StreamingOutput streamer = new StreamingOutput() {
         @Override
         public void write(OutputStream os) throws IOException,
@@ -320,17 +336,16 @@ public class OozieProxyImpersonator {
         }
       };
       return Response.ok(streamer).status(200).build();
-    } catch (IOException e) {
-      return getRespCodeForException(e);
+    } catch (IOException ex) {
+      throw new WfmWebException(ex);
     }
   }
 
-
   @GET
   @Path("/readWorkflowDraft")
   public Response readDraft(@QueryParam("workflowXmlPath") String workflowPath) {
     if (StringUtils.isEmpty(workflowPath)) {
-      throw new RuntimeException("workflowXmlPath can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
     try {
       final InputStream is = workflowFilesService.readDraft(workflowPath);
@@ -344,113 +359,37 @@ public class OozieProxyImpersonator {
         }
       };
       return Response.ok(streamer).status(200).build();
-    } catch (IOException e) {
-      return getRespCodeForException(e);
+    } catch (IOException ex) {
+      throw new WfmWebException(ex);
     }
   }
 
   @POST
   @Path("/discardWorkflowDraft")
   public Response discardDraft(
-    @QueryParam("workflowXmlPath") String workflowPath)
-    throws IOException {
-    workflowFilesService.discardDraft(workflowPath);
-    return Response.ok().build();
-  }
-
-  private Response submitJobInternal(String postBody, HttpHeaders headers,
-                                     UriInfo ui, String appPath, Boolean overwrite, JobType jobType,
-                                     String projectId, String description) {
-    if (StringUtils.isEmpty(appPath)) {
-      throw new RuntimeException("app path can't be empty.");
-    }
-    appPath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
-    if (!overwrite) {
-      boolean fileExists = hdfsFileUtils.fileExists(appPath);
-      if (fileExists) {
-        return getFileExistsResponse();
-      }
-    }
-    postBody = utils.formatXml(postBody);
+    @QueryParam("workflowXmlPath") String workflowPath) {
     try {
-      String filePath = workflowFilesService.createFile(appPath, postBody,
-        overwrite);
-      LOGGER.info(String.format(
-        "submit workflow job done. filePath=[%s]", filePath));
-    } catch (Exception ex) {
-      LOGGER.error(ex.getMessage(), ex);
-      return getRespCodeForException(ex);
-
-    }
-    if (PROJ_MANAGER_ENABLED) {
-      String name = oozieUtils.deduceWorkflowNameFromXml(postBody);
-      workflowManagerService.saveWorkflow(projectId, appPath, jobType,
-        "todo description", viewContext.getUsername(), name);
-    }
-
-    String response = oozieDelegate.submitWorkflowJobToOozie(headers,
-      appPath, ui.getQueryParameters(), jobType);
-    if (response != null && response.trim().startsWith("{")) {
-      // dealing with oozie giving error but with 200 response.
-      return Response.status(Response.Status.OK).entity(response).build();
-    } else {
-      HashMap<String, String> resp = new HashMap<String, String>();
-      resp.put("status", ErrorCodes.OOZIE_SUBMIT_ERROR.getErrorCode());
-      resp.put("message", response);
-      return Response.status(Response.Status.BAD_REQUEST).entity(resp)
-        .build();
+      workflowFilesService.discardDraft(workflowPath);
+      return Response.ok().build();
+    } catch (IOException ex) {
+      throw new WfmWebException(ex);
     }
-
-  }
-
-  private Response getRespCodeForException(Exception ex) {
-    if (ex instanceof AccessControlException) {
-      HashMap<String, String> errorDetails = getErrorDetails(
-        ErrorCodes.FILE_ACCESS_ACL_ERROR.getErrorCode(),
-        ErrorCodes.FILE_ACCESS_ACL_ERROR.getDescription(), ex);
-      return Response.status(Response.Status.BAD_REQUEST)
-        .entity(errorDetails).build();
-    } else if (ex instanceof IOException) {
-      HashMap<String, String> errorDetails = getErrorDetails(
-        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getErrorCode(),
-        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getDescription(), ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(errorDetails).build();
-    } else {
-      HashMap<String, String> errorDetails = getErrorDetails(
-        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getErrorCode(),
-        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getDescription(), ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(errorDetails).build();
-    }
-
-  }
-
-  private Response getFileExistsResponse() {
-    HashMap<String, String> resp = new HashMap<String, String>();
-    resp.put("status", ErrorCodes.WORKFLOW_PATH_EXISTS.getErrorCode());
-    resp.put("message", ErrorCodes.WORKFLOW_PATH_EXISTS.getDescription());
-    return Response.status(Response.Status.BAD_REQUEST).entity(resp)
-      .build();
   }
 
   @GET
   @Path("/readWorkflow")
   public Response readWorkflow(
     @QueryParam("workflowPath") String workflowPath, @QueryParam("jobType") String jobTypeStr) {
-    String workflowFileName=workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
-    if (!hdfsFileUtils.fileExists(workflowFileName)){
-      HashMap<String,String> response=new HashMap<>();
-      response.put("status", ErrorCodes.WORKFLOW_XML_DOES_NOT_EXIST.getErrorCode());
-      response.put("message", ErrorCodes.WORKFLOW_XML_DOES_NOT_EXIST.getDescription());
-      return Response.status(Status.BAD_REQUEST).entity(response).build();
-    }
+    try {
+      String workflowFileName = workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
+      if (!hdfsFileUtils.fileExists(workflowFileName)) {
+        throw new WfmWebException(ErrorCode.WORKFLOW_XML_DOES_NOT_EXIST);
+      }
+      WorkflowFileInfo workflowDetails = workflowFilesService
+        .getWorkflowDetails(workflowPath, JobType.valueOf(jobTypeStr));
+      if (workflowPath.endsWith(Constants.WF_DRAFT_EXTENSION) || workflowDetails.getIsDraftCurrent()) {
+        String filePath = workflowFilesService.getWorkflowDraftFileName(workflowPath, JobType.valueOf(jobTypeStr));
 
-    WorkflowFileInfo workflowDetails = workflowFilesService
-      .getWorkflowDetails(workflowPath, JobType.valueOf(jobTypeStr));
-    if (workflowPath.endsWith(Constants.WF_DRAFT_EXTENSION) || workflowDetails.getIsDraftCurrent()) {
-      String filePath = workflowFilesService.getWorkflowDraftFileName(workflowPath, JobType.valueOf(jobTypeStr));
-      try {
         InputStream inputStream = workflowFilesService.readWorkflowXml(filePath);
         String stringResponse = IOUtils.toString(inputStream);
         if (!workflowFilesService.isDraftFormatCurrent(stringResponse)) {
@@ -459,36 +398,35 @@ public class OozieProxyImpersonator {
         } else {
           return Response.ok(stringResponse).header(RESPONSE_TYPE, WorkflowFormat.DRAFT.getValue()).build();
         }
-      } catch (IOException e) {
-        return getRespCodeForException(e);
+      } else {
+        String filePath = workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
+        return getWorkflowResponse(filePath, WorkflowFormat.XML.getValue(), false);
       }
-    } else {
-      String filePath = workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
-      return getWorkflowResponse(filePath, WorkflowFormat.XML.getValue(), false);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
   }
 
-  private Response getWorkflowResponse(String filePath, String responseType, boolean olderFormatDraftIngored) {
-    try {
-      final InputStream is = workflowFilesService
-        .readWorkflowXml(filePath);
-      StreamingOutput streamer = new StreamingOutput() {
-        @Override
-        public void write(OutputStream os) throws IOException,
-          WebApplicationException {
-          IOUtils.copy(is, os);
-          is.close();
-          os.close();
-        }
-      };
-      Response.ResponseBuilder responseBuilder = Response.ok(streamer).header(RESPONSE_TYPE, responseType);
-      if(olderFormatDraftIngored){
-        responseBuilder.header(OLDER_FORMAT_DRAFT_INGORED,Boolean.TRUE.toString());
+  private Response getWorkflowResponse(String filePath, String responseType,
+                                       boolean olderFormatDraftIngored) throws IOException {
+    final InputStream is = workflowFilesService.readWorkflowXml(filePath);
+    StreamingOutput streamer = new StreamingOutput() {
+      @Override
+      public void write(OutputStream os) throws IOException,
+        WebApplicationException {
+        IOUtils.copy(is, os);
+        is.close();
+        os.close();
       }
-      return  responseBuilder.build();
-    } catch (IOException e) {
-      return getRespCodeForException(e);
+    };
+    Response.ResponseBuilder responseBuilder = Response.ok(streamer).header(RESPONSE_TYPE, responseType);
+    if (olderFormatDraftIngored) {
+      responseBuilder.header(OLDER_FORMAT_DRAFT_INGORED, Boolean.TRUE.toString());
     }
+    return responseBuilder.build();
+
   }
 
   @GET
@@ -496,12 +434,13 @@ public class OozieProxyImpersonator {
   public Response readWorkflowXml(
     @QueryParam("workflowXmlPath") String workflowPath,@QueryParam("jobType") String jobTypeStr) {
     if (StringUtils.isEmpty(workflowPath)) {
-      throw new RuntimeException("workflowXmlPath can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
-
     try {
-      final InputStream is = workflowFilesService
-        .readWorkflowXml(workflowPath);
+      if (!hdfsFileUtils.fileExists(workflowPath)) {
+        throw new WfmWebException(ErrorCode.WORKFLOW_XML_DOES_NOT_EXIST);
+      }
+      final InputStream is = workflowFilesService.readWorkflowXml(workflowPath);
       StreamingOutput streamer = new StreamingOutput() {
         @Override
         public void write(OutputStream os) throws IOException,
@@ -512,22 +451,11 @@ public class OozieProxyImpersonator {
         }
       };
       return Response.ok(streamer).status(200).build();
-    } catch (IOException e) {
-      return getRespCodeForException(e);
-    }
-  }
-
-  private HashMap<String, String> getErrorDetails(String status,
-                                                  String message, Exception ex) {
-    HashMap<String, String> resp = new HashMap<String, String>();
-    resp.put("status", status);
-    if (message != null) {
-      resp.put("message", message);
-    }
-    if (ex != null) {
-      resp.put("stackTrace", ExceptionUtils.getFullStackTrace(ex));
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
-    return resp;
   }
 
   @GET
@@ -538,8 +466,7 @@ public class OozieProxyImpersonator {
         .getPath(), ui.getQueryParameters(), HttpMethod.GET, null);
     } catch (Exception ex) {
       LOGGER.error("Error in GET proxy", ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(getErrorDetailsForException("Oozie", ex)).build();
+      throw new WfmWebException(ex);
     }
   }
 
@@ -548,13 +475,11 @@ public class OozieProxyImpersonator {
   public Response handlePost(String xml, @Context HttpHeaders headers,
                              @Context UriInfo ui) {
     try {
-
       return oozieDelegate.consumeService(headers, ui.getAbsolutePath()
         .getPath(), ui.getQueryParameters(), HttpMethod.POST, xml);
     } catch (Exception ex) {
       LOGGER.error("Error in POST proxy", ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(getErrorDetailsForException("Oozie", ex)).build();
+      throw new WfmWebException(ex);
     }
   }
 
@@ -567,8 +492,7 @@ public class OozieProxyImpersonator {
         .getPath(), ui.getQueryParameters(), HttpMethod.POST, null);
     } catch (Exception ex) {
       LOGGER.error("Error in DELETE proxy", ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(getErrorDetailsForException("Oozie", ex)).build();
+      throw new WfmWebException(ex);
     }
   }
 
@@ -581,22 +505,7 @@ public class OozieProxyImpersonator {
         .getPath(), ui.getQueryParameters(), HttpMethod.PUT, body);
     } catch (Exception ex) {
       LOGGER.error("Error in PUT proxy", ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(getErrorDetailsForException("Oozie", ex)).build();
-    }
-  }
-
-  private Map<String, String> getErrorDetailsForException(String component,
-                                                          Exception ex) {
-    String errorCode = component + "exception";
-    String errorMessage = component + " Exception";
-    if (ex instanceof RuntimeException) {
-      Throwable cause = ex.getCause();
-      if (cause instanceof IOException) {
-        errorCode = component + "io.exception";
-        errorMessage = component + "IO Exception";
-      }
+      throw new WfmWebException(ex);
     }
-    return getErrorDetails(errorCode, errorMessage, ex);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
index ef3b508..3355c85 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
@@ -25,13 +25,15 @@ import org.apache.oozie.ambari.view.*;
 import org.apache.oozie.ambari.view.assets.model.ActionAsset;
 import org.apache.oozie.ambari.view.assets.model.ActionAssetDefinition;
 import org.apache.oozie.ambari.view.assets.model.AssetDefintion;
+import org.apache.oozie.ambari.view.exception.ErrorCode;
+import org.apache.oozie.ambari.view.exception.WfmException;
+import org.apache.oozie.ambari.view.exception.WfmWebException;
 import org.apache.oozie.ambari.view.model.APIResult;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.*;
 import javax.ws.rs.core.*;
-import javax.ws.rs.core.Response.Status;
 import java.io.IOException;
 import java.util.*;
 
@@ -65,7 +67,7 @@ public class AssetResource {
       result.setData(assets);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+      throw new WfmWebException(e);
     }
   }
 
@@ -80,7 +82,7 @@ public class AssetResource {
       result.setData(assets);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+      throw new WfmWebException(e);
     }
   }
   @POST
@@ -88,19 +90,20 @@ public class AssetResource {
                             @QueryParam("id") String id, @Context UriInfo ui, String body) {
     try {
       Gson gson = new Gson();
-      AssetDefintion assetDefinition = gson.fromJson(body,
-        AssetDefintion.class);
+      AssetDefintion assetDefinition = gson.fromJson(body, AssetDefintion.class);
       Map<String, String> validateAsset = validateAsset(headers,
         assetDefinition.getDefinition(), ui.getQueryParameters());
       if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
-        return Response.status(Status.BAD_REQUEST).build();
+        throw new WfmWebException(ErrorCode.ASSET_INVALID_FROM_OOZIE);
       }
       assetService.saveAsset(id, viewContext.getUsername(), assetDefinition);
       APIResult result = new APIResult();
       result.setStatus(APIResult.Status.SUCCESS);
       return Response.ok(result).build();
-    } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
   }
 
@@ -113,43 +116,50 @@ public class AssetResource {
   public Map<String, String> validateAsset(HttpHeaders headers,
                                            String postBody, MultivaluedMap<String, String> queryParams) {
     String workflowXml = oozieUtils.generateWorkflowXml(postBody);
+    Map<String, String> result = new HashMap<>();
+    String tempWfPath = "/tmp" + "/tmpooziewfs/tempwf_" + Math.round(Math.random() * 100000) + ".xml";
     try {
-      Map<String, String> result = new HashMap<>();
-      String tempWfPath = "/tmp" + "/tmpooziewfs/tempwf_" + Math.round(Math.random()*100000) + ".xml";
       hdfsFileUtils.writeToFile(tempWfPath, workflowXml, true);
-      queryParams.put("oozieparam.action", getAsList("dryrun"));
-      queryParams.put("oozieconfig.rerunOnFailure", getAsList("false"));
-      queryParams.put("oozieconfig.useSystemLibPath", getAsList("true"));
-      queryParams.put("resourceManager", getAsList("useDefault"));
-      String dryRunResp = oozieDelegate.submitWorkflowJobToOozie(headers,
-        tempWfPath, queryParams, JobType.WORKFLOW);
-      LOGGER.info(String.format("resp from validating asset=[%s]",
-        dryRunResp));
+    } catch (IOException e) {
+      throw new WfmWebException(e, ErrorCode.FILE_ACCESS_UNKNOWN_ERROR);
+    }
+    queryParams.put("oozieparam.action", getAsList("dryrun"));
+    queryParams.put("oozieconfig.rerunOnFailure", getAsList("false"));
+    queryParams.put("oozieconfig.useSystemLibPath", getAsList("true"));
+    queryParams.put("resourceManager", getAsList("useDefault"));
+    String dryRunResp = oozieDelegate.submitWorkflowJobToOozie(headers,
+      tempWfPath, queryParams, JobType.WORKFLOW);
+    LOGGER.info(String.format("resp from validating asset=[%s]", dryRunResp));
+    try {
       hdfsFileUtils.deleteFile(tempWfPath);
-      if (dryRunResp != null && dryRunResp.trim().startsWith("{")) {
-        JsonElement jsonElement = new JsonParser().parse(dryRunResp);
-        JsonElement idElem = jsonElement.getAsJsonObject().get("id");
-        if (idElem != null) {
-          result.put(STATUS_KEY, STATUS_OK);
-        } else {
-          result.put(STATUS_KEY, STATUS_FAILED);
-          result.put(MESSAGE_KEY, dryRunResp);
-        }
+    } catch (IOException e) {
+      throw new WfmWebException(e, ErrorCode.FILE_ACCESS_UNKNOWN_ERROR);
+    }
+    if (dryRunResp != null && dryRunResp.trim().startsWith("{")) {
+      JsonElement jsonElement = new JsonParser().parse(dryRunResp);
+      JsonElement idElem = jsonElement.getAsJsonObject().get("id");
+      if (idElem != null) {
+        result.put(STATUS_KEY, STATUS_OK);
       } else {
         result.put(STATUS_KEY, STATUS_FAILED);
         result.put(MESSAGE_KEY, dryRunResp);
       }
-      return result;
-    } catch (IOException e) {
-      throw new RuntimeException(e);
+    } else {
+      result.put(STATUS_KEY, STATUS_FAILED);
+      result.put(MESSAGE_KEY, dryRunResp);
     }
+    return result;
   }
 
   @GET
   @Path("/assetNameAvailable")
   public Response assetNameAvailable(@QueryParam("name") String name){
-    boolean available=assetService.isAssetNameAvailable(name);
-    return Response.ok(available).build();
+    try {
+      boolean available = assetService.isAssetNameAvailable(name);
+      return Response.ok(available).build();
+    }catch (Exception e){
+      throw new WfmWebException(e);
+    }
   }
 
   @GET
@@ -162,7 +172,7 @@ public class AssetResource {
       result.setData(assetDefinition);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+      throw new WfmWebException(e);
     }
   }
 
@@ -170,14 +180,13 @@ public class AssetResource {
   @Path("/definition/id}")
   public Response getAssetDefinition(@PathParam("defnitionId") String id) {
     try {
-      ActionAssetDefinition assetDefinition = assetService
-        .getAssetDefinition(id);
+      ActionAssetDefinition assetDefinition = assetService.getAssetDefinition(id);
       APIResult result = new APIResult();
       result.setStatus(APIResult.Status.SUCCESS);
       result.setData(assetDefinition);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+      throw new WfmWebException(e);
     }
   }
 
@@ -187,19 +196,19 @@ public class AssetResource {
     try {
       ActionAsset asset = assetService.getAsset(id);
       if (asset == null) {
-        throw new RuntimeException("Asset doesnt exist");
+        throw new WfmWebException(ErrorCode.ASSET_NOT_EXIST);
       }
       if (!viewContext.getUsername().equals(asset.getOwner())){
-        throw new RuntimeException(
-          "Dont have permission to delete this asset");
+        throw new WfmWebException(ErrorCode.PERMISSION_ERROR);
       }
       assetService.deleteAsset(id);
       APIResult result = new APIResult();
       result.setStatus(APIResult.Status.SUCCESS);
       return Response.ok(result).build();
-    } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/ErrorCode.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/ErrorCode.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/ErrorCode.java
new file mode 100644
index 0000000..6495d2c
--- /dev/null
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/ErrorCode.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.ambari.view.exception;
+
+public enum ErrorCode {
+  OOZIE_SUBMIT_ERROR("error.oozie.submit", "Submitting job to Oozie failed. Please check your definition/configuration.",true),
+  FILE_ACCESS_ACL_ERROR("error.file.access.control", "Access Error to file due to access control", true),
+  FILE_ACCESS_UNKNOWN_ERROR("error.file.access", "Error accessing file"),
+  WORKFLOW_PATH_EXISTS("error.workflow.path.exists", "File exists", true),
+  WORKFLOW_XML_DOES_NOT_EXIST("error.workflow.xml.not.exists", "File does not exist", true),
+  INVALID_ASSET_INPUT("error.invalid.asset.input", "Invalid asset definition", true),
+  INVALID_EMPTY_INPUT("error.invalid.empty.input", "Input path cannot be empty", true),
+  ASSET_NOT_EXIST("error.asset.not.exist","Asset doesn\u2019t exist",true),
+  PERMISSION_ERROR("error.permission","Don\u2019t have permission",true),
+  ASSET_INVALID_FROM_OOZIE("error.oozie.asset.invalid","Invalid Asset Definition",true);
+
+  private String errorCode;
+  private String description;
+  private boolean isInputError = false;
+
+  ErrorCode(String errorCode, String description) {
+    this.errorCode = errorCode;
+    this.description = description;
+  }
+
+  ErrorCode(String errorCode, String description, boolean isInputError) {
+    this.errorCode = errorCode;
+    this.description = description;
+    this.isInputError = isInputError;
+  }
+
+  public String getErrorCode() {
+    return errorCode;
+  }
+
+  public String getDescription() {
+    return description;
+  }
+
+  public boolean isInputError() {
+    return isInputError;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/WfmException.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/WfmException.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/WfmException.java
new file mode 100644
index 0000000..36ebec8
--- /dev/null
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/WfmException.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.ambari.view.exception;
+
+
+public class WfmException extends RuntimeException {
+  private  ErrorCode errorCode;
+
+  public WfmException(ErrorCode errorCode) {
+    this.errorCode = errorCode;
+  }
+
+  public WfmException(String message, ErrorCode errorCode) {
+    super(message);
+    this.errorCode = errorCode;
+  }
+
+  public WfmException(String message, Throwable cause, ErrorCode errorCode) {
+    super(message, cause);
+    this.errorCode = errorCode;
+  }
+
+  public WfmException(Throwable cause, ErrorCode errorCode) {
+    super(cause);
+    this.errorCode = errorCode;
+  }
+
+  public ErrorCode getErrorCode() {
+    return errorCode;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/WfmWebException.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/WfmWebException.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/WfmWebException.java
new file mode 100644
index 0000000..fd4ce80
--- /dev/null
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/exception/WfmWebException.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.ambari.view.exception;
+
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.hadoop.security.AccessControlException;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.util.HashMap;
+
+public class WfmWebException extends WebApplicationException {
+  private static final int STATUS = 500;
+  private ErrorCode errorCode;
+  private String additionalDetail = null;
+  private String message;
+  public WfmWebException(String message) {
+    super();
+    setMessage(message);
+  }
+
+  private void setMessage(String message) {
+    this.message=message;
+  }
+
+  public WfmWebException(Throwable cause) {
+    super(cause);
+  }
+
+  public WfmWebException(ErrorCode errorCode) {
+    super();
+    setMessage(errorCode.getDescription());
+    this.errorCode = errorCode;
+  }
+
+  public WfmWebException(String message, Throwable cause) {
+    super(cause);
+    setMessage(message);
+  }
+
+  public WfmWebException(String message, ErrorCode errorCode) {
+    super();
+    setMessage(message);
+    this.errorCode = errorCode;
+  }
+
+  public WfmWebException(String message, Throwable cause, ErrorCode errorCode) {
+    super(cause);
+    setMessage(message);
+    this.errorCode = errorCode;
+  }
+
+  public WfmWebException(Throwable cause, ErrorCode errorCode) {
+    super(cause);
+    setMessage(errorCode.getDescription());
+    this.errorCode = errorCode;
+  }
+
+
+  public void setAdditionalDetail(String additionalDetail) {
+    this.additionalDetail = additionalDetail;
+  }
+
+  @Override
+  public Response getResponse() {
+    HashMap<String, Object> response = new HashMap<String, Object>();
+    String trace = null;
+    Throwable ex = this.getCause();
+    if (ex != null) {
+      trace = ExceptionUtils.getStackTrace(ex);
+      if (ex instanceof AccessControlException) {
+        errorCode = ErrorCode.FILE_ACCESS_ACL_ERROR;
+      } else if (ex instanceof IOException) {
+        errorCode = ErrorCode.FILE_ACCESS_UNKNOWN_ERROR;
+      }
+    }else{
+      trace = ExceptionUtils.getStackTrace(this);
+    }
+    response.put("stackTrace", trace);
+    int status = errorCode != null && errorCode.isInputError() ? Response.Status.BAD_REQUEST.getStatusCode() : STATUS;
+    if (errorCode != null) {
+      response.put("errorCode", errorCode.getErrorCode());
+      response.put("message", errorCode.getDescription());
+    } else {
+      response.put("message", this.getMessage());
+    }
+    if (this.additionalDetail != null) {
+      response.put("additionalDetail", additionalDetail);
+    }
+    return Response.status(status).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON).build();
+  }
+
+  @Override
+  public String getMessage() {
+    return message;
+  }
+}

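[Editor's note, not part of the commit: the three new classes above (ErrorCode, WfmException, WfmWebException) replace the ad-hoc error maps that the patch removes from OozieProxyImpersonator and AssetResource. The following is a minimal, hypothetical sketch of the pattern the resources now follow; the resource class, its path, and the readFromHdfs() helper are invented for illustration, while WfmWebException and ErrorCode are the classes introduced in this patch.]

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;

import org.apache.commons.lang.StringUtils;
import org.apache.oozie.ambari.view.exception.ErrorCode;
import org.apache.oozie.ambari.view.exception.WfmWebException;

@Path("/example")
public class ExampleResource {

  @GET
  public Response readExample(@QueryParam("path") String path) {
    if (StringUtils.isEmpty(path)) {
      // INVALID_EMPTY_INPUT is flagged as an input error, so
      // WfmWebException.getResponse() answers with HTTP 400 and a JSON body
      // carrying "errorCode" and "message".
      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
    }
    try {
      return Response.ok(readFromHdfs(path)).build();
    } catch (Exception ex) {
      // Unexpected failures become HTTP 500; getResponse() inspects the cause
      // (AccessControlException/IOException map to file-access error codes) and
      // adds a "stackTrace" field that the UI's stack-trace dialog displays.
      throw new WfmWebException(ex);
    }
  }

  private String readFromHdfs(String path) throws java.io.IOException {
    // Placeholder for an HDFS read; assumed for the example only.
    return "";
  }
}
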
http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
index a0aa234..e1a5808 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
@@ -26,8 +26,10 @@ import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Response;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.oozie.ambari.view.exception.WfmWebException;
 
 public class WorkflowsManagerResource {
 	private final WorkflowManagerService workflowManagerService;
@@ -38,19 +40,27 @@ public class WorkflowsManagerResource {
 		this.workflowManagerService=new WorkflowManagerService(viewContext);
 	}
 
-	@GET
-	public Map<String,Object> getWorkflows(){
-	    HashMap<String,Object> result=new HashMap<>();
-	    result.put("wfprojects", workflowManagerService.getAllWorkflows(viewContext.getUsername()));
-	    return result;
-	}
-	
-	
-	@DELETE
+  @GET
+  public Response getWorkflows() {
+    try {
+      HashMap<String, Object> result = new HashMap<>();
+      result.put("wfprojects", workflowManagerService.getAllWorkflows(viewContext.getUsername()));
+      return Response.ok(result).build();
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
+    }
+  }
+
+
+  @DELETE
 	@Path("/{projectId}")
-	public void deleteWorkflow( @PathParam("projectId") String id,
-            @DefaultValue("false") @QueryParam("deleteDefinition") Boolean deleteDefinition){
-	    workflowManagerService.deleteWorkflow(id,deleteDefinition);
+	public Response deleteWorkflow(@PathParam("projectId") String id,
+                                 @DefaultValue("false") @QueryParam("deleteDefinition") Boolean deleteDefinition){
+	  try{
+      workflowManagerService.deleteWorkflow(id,deleteDefinition);
+      return Response.ok().build();
+    }catch (Exception ex) {
+      throw new WfmWebException(ex);
+    }
 	}
-	
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
index 3ccbc07..e94d51a 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
@@ -156,10 +156,12 @@ export default Ember.Component.extend(Ember.Evented, Validations, {
       }
       this.set('bundleFilePath', filePath);
       this.set("isImporting", false);
-    }.bind(this)).catch(function(e){
+    }.bind(this)).catch(function(data){
+      console.error(data);
+      this.set("errorMsg", "There is some problem while importing.");
       this.set("isImporting", false);
       this.set("isImportingSuccess", false);
-      throw new Error(e);
+      this.set("data", data);
     }.bind(this));
   },
   getBundleFromJSON(draftBundle){
@@ -363,7 +365,7 @@ export default Ember.Component.extend(Ember.Evented, Validations, {
       }.bind(this)).catch(function(e){
         this.$('#loading').hide();
         this.get("errors").pushObject({'message' : 'Could not process coordinator from ' + e.path});
-        throw new Error(e.trace);
+        throw new Error(e);
       }.bind(this));
     },
     preview(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
index bbd619d..4a57e37 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
@@ -274,10 +274,12 @@ export default Ember.Component.extend(Validations, Ember.Evented, {
       }
       this.set('coordinatorFilePath', filePath);
       this.set("isImporting", false);
-    }.bind(this)).catch(function(e){
+    }.bind(this)).catch(function(data){
+      console.error(data);
+      this.set("errorMsg", "There is some problem while importing.");
       this.set("isImporting", false);
       this.set("isImportingSuccess", false);
-      throw new Error(e);
+      this.set("data", data);
     }.bind(this));
   },
   getCoordinatorFromJSON(draftCoordinator){

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
index 7a7c38d..fdb4f5e 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
@@ -18,4 +18,53 @@
 import Ember from 'ember';
 
 export default Ember.Component.extend({
+  showingStackTrace: false,
+  hasErrorMsg : Ember.computed('errorMsg', function() {
+    return !Ember.isBlank(this.get("errorMsg"));
+  }),
+  errorMsgDetails : Ember.computed('data.responseText', function() {
+    var jsonResponse = this.getparsedResponse();
+    if (jsonResponse.message) {
+      if (jsonResponse.message.indexOf('Permission denied') >= 0) {
+        return "Permission Denied";
+      }
+      return jsonResponse.message;
+    }
+    return "";
+  }),
+  stackTrace : Ember.computed('data.responseText', function() {
+      var jsonResponse = this.getparsedResponse();
+      var stackTraceMsg = jsonResponse.stackTrace;
+      if(!stackTraceMsg){
+        return "";
+      }
+      if (stackTraceMsg instanceof Array) {
+        return stackTraceMsg.join("").replace(/\tat /g, '&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
+      } else {
+        return stackTraceMsg.replace(/\tat /g, '<br/>&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
+      }
+  }),
+  isStackTraceAvailable : Ember.computed('stackTrace', function(){
+    return this.get('stackTrace') && this.get('stackTrace').length ? true : false;
+  }),
+  getparsedResponse() {
+    var response = this.get('data.responseText');
+    if (response) {
+      try {
+        return JSON.parse(response);
+      } catch(err){
+        return "";
+      }
+    }
+    return "";
+  },
+
+  actions: {
+    showStackTrace(){
+      this.set("showingStackTrace", !this.get("showingStackTrace"));
+    },
+    closeStackTrace(){
+      this.set("showingStackTrace", false);
+    }
+  }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index f97add8..de72c6d 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -94,13 +94,13 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   isWorkflowImporting: false,
   isAssetPublishing: false,
   errorMsg: "",
+  data : {
+    "responseText": ""
+  },
   shouldPersist : false,
   useCytoscape: Constants.useCytoscape,
   cyOverflow: {},
   clipboard : Ember.computed.alias('clipboardService.clipboard'),
-  isStackTraceVisible: false,
-  isStackTraceAvailable: false,
-  stackTrace:"",
   showingStreamImport:false,
   fileInfo:Ember.Object.create(),
   isDraft: false,
@@ -310,24 +310,6 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   doValidation(){
     this.validate();
   },
-  getStackTrace(data){
-    if(data){
-     try{
-      var stackTraceMsg = JSON.parse(data).stackTrace;
-      if(!stackTraceMsg){
-        return "";
-      }
-     if(stackTraceMsg instanceof Array){
-       return stackTraceMsg.join("").replace(/\tat /g, '&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
-     } else {
-       return stackTraceMsg.replace(/\tat /g, '<br/>&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
-     }
-     } catch(err){
-       return "";
-     }
-    }
-    return "";
-  },
   importWorkflow(filePath){
     var self = this;
     this.set("isWorkflowImporting", true);
@@ -343,8 +325,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       this.set("workflowFilePath", filePath);
     }.bind(this)).catch(function(data){
       console.error(data);
-      self.set("errorMsg", "There is some problem while importing.Please try again.");
-      self.showingErrorMsgInDesigner(data);
+      self.set("errorMsg", "There is some problem while importing.");
+      self.set("data", data);
       self.set("isWorkflowImporting", false);
     });
   },
@@ -491,8 +473,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     exportActionNodeXmlDefered.promise.then(function(data){
       self.set("isAssetPublishing", false);
     }.bind(this)).catch(function(data){
-      self.set("errorMsg", "There is some problem while publishing asset. Please try again.");
-      self.showingErrorMsgInDesigner(data);
+      self.set("errorMsg", "There is some problem while publishing asset.");
+      self.set("data", data);
       self.set("isAssetPublishing", false);
     });
 
@@ -739,15 +721,6 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       this.set("showingWorkflowConfigProps",true);
     }
   },
-  showingErrorMsgInDesigner(data){
-      var self = this, stackTraceMsg = self.getStackTrace(data.responseText);
-      if(stackTraceMsg.length){
-        self.set("stackTrace", stackTraceMsg);
-        self.set("isStackTraceAvailable", true);
-      } else {
-        self.set("isStackTraceAvailable", false);
-      }
-  },
   isDraftExists(path){
     var deferred = Ember.RSVP.defer(), url, self = this;
     if(!path){
@@ -833,12 +806,6 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       });
       reader.readAsText(file);
     },
-    showStackTrace(){
-      this.set("isStackTraceVisible", true);
-    },
-    hideStackTrace(){
-      this.set("isStackTraceVisible", false);
-    },
     showWorkflowSla (value) {
       this.set('showWorkflowSla', value);
     },
@@ -1029,9 +996,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         actionSettingsXmlDefered.promise.then(function(data){
           this.importActionSettingsFromString(data);
         }.bind(this)).catch(function(data){
-          console.error(data);
-          self.set("errorMsg", "There is some problem while importing asset.Please try again.");
-          self.showingErrorMsgInDesigner(data);
+          self.set("errorMsg", "There is some problem while importing asset.");
+          self.set("data", data);
         });
       }
     },
@@ -1047,9 +1013,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         actionSettingsXmlDefered.promise.then(function(data){
           this.importActionNodeFromString(data);
         }.bind(this)).catch(function(data){
-          console.error(data);
-          self.set("errorMsg", "There is some problem while importing asset. Please try again.");
-          self.showingErrorMsgInDesigner(data);
+          self.set("errorMsg", "There is some problem while importing asset.");
+          self.set("data", data);
         });
       }
     },
@@ -1184,9 +1149,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       saveAssetConfigDefered.promise.then(function(data){
         self.set("isAssetPublishing", false);
       }.bind(this)).catch(function(data){
+        self.set("errorMsg", "There is some problem while saving asset.");
+        self.set("data", data);
         self.set("isAssetPublishing", false);
-        self.set("errorMsg", "There is some problem while saving asset. Please try again.");
-        self.showingErrorMsgInDesigner(data);
       });
     },
     showAssetList(value) {
@@ -1204,9 +1169,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         self.importActionSettingsFromString(importedAsset.definition);
         self.set("isAssetImporting", false);
       }.bind(this)).catch(function(data){
+        self.set("errorMsg", "There is some problem while importing asset.");
+        self.set("data", data);
         self.set("isAssetImporting", false);
-        self.set("errorMsg", "There is some problem while importing asset. Please try again.");
-        self.showingErrorMsgInDesigner(data);
       });
     },
     showAssetNodeList(value) {
@@ -1224,9 +1189,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         self.importActionNodeFromString(importedAsset.definition);
         self.set("isAssetImporting", false);
       }.bind(this)).catch(function(data){
+        self.set("errorMsg", "There is some problem while importing asset.");
+        self.set("data", data);
         self.set("isAssetImporting", false);
-        self.set("errorMsg", "There is some problem while importing asset. Please try again.");
-        self.showingErrorMsgInDesigner(data);
       });
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/components/stack-trace-dialog.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/stack-trace-dialog.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/stack-trace-dialog.js
new file mode 100644
index 0000000..dd96510
--- /dev/null
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/stack-trace-dialog.js
@@ -0,0 +1,26 @@
+/*
+*    Licensed to the Apache Software Foundation (ASF) under one or more
+*    contributor license agreements.  See the NOTICE file distributed with
+*    this work for additional information regarding copyright ownership.
+*    The ASF licenses this file to You under the Apache License, Version 2.0
+*    (the "License"); you may not use this file except in compliance with
+*    the License.  You may obtain a copy of the License at
+*
+*        http://www.apache.org/licenses/LICENSE-2.0
+*
+*    Unless required by applicable law or agreed to in writing, software
+*    distributed under the License is distributed on an "AS IS" BASIS,
+*    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+*    See the License for the specific language governing permissions and
+*    limitations under the License.
+*/
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  initialize: function(){
+    this.$('#stack_trace_dialog').modal('show');
+    this.$('#stack_trace_dialog').modal().on('hidden.bs.modal', function() {
+      this.sendAction('closeStackTrace');
+    }.bind(this));
+  }.on('didInsertElement'),
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
index e98d182..a424049 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
@@ -605,9 +605,10 @@ input:invalid {
 #configureJob .modal-dialog,
 #asset-delete-confirm-dialog .modal-dialog,
 #projectsList .modal-dialog,
-#previewModal .modal-dialog {
+#previewModal .modal-dialog,
+#stack_trace_dialog .modal-dialog {
     width: @modalDialogWidth;
-height: 100vh;
+    height: 100vh;
 }
 
 #collapseOne{
@@ -1547,15 +1548,11 @@ height: 100vh;
   padding-left: 0px;
   padding-right: 0px;
 }
-#stackTrace{
-  white-space: pre-wrap;
-  max-width: 100%;
-  max-height: 400px;
-  overflow: scroll;
-}
+
 .jobIdClass {
   width: 50px;
 }
+
 .width50 {
     white-space: nowrap;
     width: 150px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
index ca58431..2d374a5 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
@@ -72,7 +72,7 @@
     <div id='loading'>
       {{spin-spinner lines=13 length=20 width=10}}
     </div>
-    {{designer-errors errors=errors}}
+    {{designer-errors errors=errors validationErrors=validationErrors errorMsg=errorMsg data=data}}
     <form class="form-horizontal">
       <div class="col-sm-12 paddingtop10">
         <div class="col-sm-8 centralize-panel">
@@ -100,7 +100,7 @@
                   <li class="list-group-item">No Coordinators Configured.</li>
                   {{/each}}
                 </ul>
-                {{#field-error model=this field='bundle.coordinators' showErrorMessage=true}}{{/field-error}}
+                {{#field-error model=this field='bundle.coordinators' showErrorMessage=showErrorMessage}}{{/field-error}}
 
               {{#if coordinatorCreateMode}}
               {{#bundle-coord-config coordinator=currentCoordinator openTab="openTab" openFileBrowser="openFileBrowser" add="addCoordinator" cancel="cancelCoordinatorOperation" createMode=coordinatorCreateMode}}{{/bundle-coord-config}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
index 7b607ca..7db5ce2 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
@@ -93,7 +93,7 @@
   {{spin-spinner lines=13 length=20 width=10}}
 </div>
 <div class="container-fluid">
-  {{designer-errors errors=errors}}
+  {{designer-errors errors=errors validationErrors=validationErrors errorMsg=errorMsg data=data}}
   <form class="form-horizontal">
     <div class="col-sm-12 paddingtop10">
       <div class="col-sm-8 centralize-panel">

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
index 8438255..00cb8a6 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
@@ -34,4 +34,21 @@
       {{/if}}
     </div>
   {{/if}}
+
+  {{#if hasErrorMsg}}
+    <div id="loader">
+        <div id="alert"class="alert alert-danger alert-dismissible workflow-error" role="alert">
+            {{errorMsg}}
+            <div id="errorMsgDetails">
+              {{errorMsgDetails}}
+              {{#if isStackTraceAvailable}}
+                <a href="#" class="action-link" {{action "showStackTrace"}}>Details</a>
+              {{/if}}
+            </div>
+        </div>
+    </div>
+  {{/if}}
 </div>
+{{#if showingStackTrace}}
+  {{#stack-trace-dialog title="Stack Trace" stackTrace=stackTrace closeStackTrace="closeStackTrace"}}{{/stack-trace-dialog}}
+{{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index 1a73421..4ff9d87 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -138,7 +138,7 @@
   {{/if}}
   <div  id="content" class="panel panel-default designer-main-panel col-xs-20">
     <div class="designer-panel designer-canvas">
-      {{designer-errors errors=errors validationErrors=validationErrors}}
+      {{designer-errors errors=errors validationErrors=validationErrors errorMsg=errorMsg data=data}}
       {{#if undoAvailable}}
         <div id="alert"class="alert alert-warning workflow-error" role="alert">
           {{#if (eq undoType 'nodeDeleted')}}
@@ -157,22 +157,6 @@
           </div>
       </div>
       {{/if}}
-      {{#if (not (eq errorMsg ""))}}
-        <div id="loader">
-            <div id="alert"class="alert alert-danger alert-dismissible workflow-error" role="alert">
-                {{errorMsg}}
-                {{#if isStackTraceAvailable}}
-                  {{#if isStackTraceVisible}}
-                    <a href="#" class="action-link" {{action "hideStackTrace"}}>Hide Log</a>
-                    <div id="stackTrace">{{{stackTrace}}}</div>
-                  {{/if}}
-                  {{#unless isStackTraceVisible}}
-                    <a href="#" class="action-link" {{action "showStackTrace"}}>Show Log</a>
-                  {{/unless}}
-                {{/if}}
-            </div>
-        </div>
-      {{/if}}
       {{#if isAssetPublishing}}
         <div id="loader">
             <div id="alert"class="alert alert-info alert-dismissible workflow-error" role="alert">

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/stack-trace-dialog.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/stack-trace-dialog.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/stack-trace-dialog.hbs
new file mode 100644
index 0000000..b82726b
--- /dev/null
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/stack-trace-dialog.hbs
@@ -0,0 +1,33 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+<div id="stack_trace_dialog" class="modal fade" role="dialog">
+  <div class="modal-dialog">
+    <div class="modal-content">
+      <div class="modal-header">
+        <button type="button" class="close" data-dismiss="modal">&times;</button>
+        <h4 class="modal-title">{{title}}</h4>
+      </div>
+      <div class="modal-body">
+        <div id="stackTrace">{{{stackTrace}}}</div>
+      </div>
+      <div class="modal-footer">
+        <button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
+      </div>
+    </div>
+  </div>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9fdeec1a/contrib/views/wfmanager/src/main/resources/ui/tests/integration/components/stack-trace-dialog-test.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/tests/integration/components/stack-trace-dialog-test.js b/contrib/views/wfmanager/src/main/resources/ui/tests/integration/components/stack-trace-dialog-test.js
new file mode 100644
index 0000000..64e3f90
--- /dev/null
+++ b/contrib/views/wfmanager/src/main/resources/ui/tests/integration/components/stack-trace-dialog-test.js
@@ -0,0 +1,40 @@
+/*
+ *    Licensed to the Apache Software Foundation (ASF) under one or more
+ *    contributor license agreements.  See the NOTICE file distributed with
+ *    this work for additional information regarding copyright ownership.
+ *    The ASF licenses this file to You under the Apache License, Version 2.0
+ *    (the "License"); you may not use this file except in compliance with
+ *    the License.  You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *    Unless required by applicable law or agreed to in writing, software
+ *    distributed under the License is distributed on an "AS IS" BASIS,
+ *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *    See the License for the specific language governing permissions and
+ *    limitations under the License.
+ */
+import { moduleForComponent, test } from 'ember-qunit';
+import hbs from 'htmlbars-inline-precompile';
+
+moduleForComponent('stack-trace-dialog', 'Integration | Component | stack trace dialog', {
+  integration: true
+});
+
+test('it renders', function(assert) {
+  // Set any properties with this.set('myProperty', 'value');
+  // Handle any actions with this.on('myAction', function(val) { ... });"
+
+  this.render(hbs`{{stack-trace-dialog}}`);
+
+  assert.equal(this.$().text().trim(), '');
+
+  // Template block usage:"
+  this.render(hbs`
+    {{#stack-trace-dialog}}
+      template block text
+    {{/stack-trace-dialog}}
+  `);
+
+  assert.equal(this.$().text().trim(), 'template block text');
+});


[02/50] ambari git commit: AMBARI-20073. Side Nav: multiple changes based on feedback from UX - 2.(xiwang)

Posted by nc...@apache.org.
AMBARI-20073. Side Nav: multiple changes based on feedback from UX - 2.(xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d7e9ef21
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d7e9ef21
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d7e9ef21

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d7e9ef21dfa7bca1b91d94398b615f41908983c7
Parents: 36a01d4
Author: Xi Wang <xi...@apache.org>
Authored: Tue Feb 21 14:53:45 2017 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Feb 21 14:53:52 2017 -0800

----------------------------------------------------------------------
 ambari-web/app/styles/application.less          |  1 -
 .../app/styles/theme/bootstrap-ambari.css       | 14 +++-
 ambari-web/app/templates/application.hbs        | 76 ++++++++++----------
 .../vendor/scripts/theme/bootstrap-ambari.js    |  9 +++
 4 files changed, 59 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d7e9ef21/ambari-web/app/styles/application.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/application.less b/ambari-web/app/styles/application.less
index befbf5a..b2a7706 100644
--- a/ambari-web/app/styles/application.less
+++ b/ambari-web/app/styles/application.less
@@ -69,7 +69,6 @@ ul.typeahead.dropdown-menu {
   overflow: visible;
   padding-bottom: @footer-height;
   min-width: 980px;
-  margin-left: @side-navigation-width;
 
    .clock-view {
      top: 10px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7e9ef21/ambari-web/app/styles/theme/bootstrap-ambari.css
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/theme/bootstrap-ambari.css b/ambari-web/app/styles/theme/bootstrap-ambari.css
index 5297c77..af78310 100644
--- a/ambari-web/app/styles/theme/bootstrap-ambari.css
+++ b/ambari-web/app/styles/theme/bootstrap-ambari.css
@@ -823,7 +823,6 @@ input.radio:checked + label:after {
 .navigation-bar-container ul.nav.side-nav-footer li.submenu-li > a,
 .navigation-bar-container ul.nav.side-nav-menu li.mainmenu-li > a,
 .navigation-bar-container ul.nav.side-nav-footer li.mainmenu-li > a {
-  padding: 15px 5px 15px 25px;
   display: table-cell;
   vertical-align: middle;
   width: 190px;
@@ -868,6 +867,16 @@ input.radio:checked + label:after {
   color: #b8bec4;
   padding: 3px 5px 3px 10px;
 }
+.navigation-bar-container ul.nav.side-nav-menu li.navigation-footer > a,
+.navigation-bar-container ul.nav.side-nav-footer li.navigation-footer > a,
+.navigation-bar-container ul.nav.side-nav-menu li.mainmenu-li > a,
+.navigation-bar-container ul.nav.side-nav-footer li.mainmenu-li > a {
+  padding: 15px 5px 15px 25px;
+}
+.navigation-bar-container ul.nav.side-nav-menu li.submenu-li > a,
+.navigation-bar-container ul.nav.side-nav-footer li.submenu-li > a {
+  padding: 10px 5px 10px 25px;
+}
 .navigation-bar-container ul.nav.side-nav-menu li.navigation-footer,
 .navigation-bar-container ul.nav.side-nav-footer li.navigation-footer {
   background: #313d54;
@@ -1005,6 +1014,7 @@ input.radio:checked + label:after {
 .navigation-bar-container ul.nav.side-nav-menu .menu-item-name,
 .navigation-bar-container ul.nav.side-nav-footer .menu-item-name {
   display: inline-block;
+  vertical-align: bottom;
   max-width: 94px;
   overflow: hidden;
   text-overflow: ellipsis;
@@ -1068,7 +1078,7 @@ input.radio:checked + label:after {
 }
 .navigation-bar-container.collapsed ul.nav.side-nav-menu li.submenu-li > a,
 .navigation-bar-container.collapsed ul.nav.side-nav-footer li.submenu-li > a {
-  padding: 15px 5px 15px 25px;
+  padding: 10px 5px 10px 25px;
   width: 190px;
 }
 .navigation-bar-container.collapsed ul.nav.side-nav-menu li.active,

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7e9ef21/ambari-web/app/templates/application.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/application.hbs b/ambari-web/app/templates/application.hbs
index 573d92b..9d6db78 100644
--- a/ambari-web/app/templates/application.hbs
+++ b/ambari-web/app/templates/application.hbs
@@ -21,46 +21,46 @@
 {{/if}}
 
 {{#if App.router.mainController.isClusterDataLoaded}}
-<div id="side-nav" class="navigation-bar">
-  <div class="navigation-bar-container">
-  <ul class="side-nav-header nav nav-pills nav-stacked">
-    <li class="navigation-header">
-      {{#if enableLinks}}
-        <a {{translateAttr href="topnav.logo.href"}} class="ambari-logo">
-          <img src="/img/ambari-logo.png" alt="Apache Ambari" title="Apache Ambari">
-        </a>
-      {{else}}
-        <a class="ambari-logo">
-          <img src="/img/ambari-logo.png" alt="Apache Ambari" title="Apache Ambari">
-        </a>
-      {{/if}}
-      <div class="btn-group">
-        <div class="dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
-          <span class="ambari-header" title="Apache Ambari">{{t app.name}}</span>
-          <span class="toggle-icon glyphicon glyphicon-triangle-bottom"></span>
+  <div id="side-nav" class="navigation-bar">
+    <div class="navigation-bar-container">
+    <ul class="side-nav-header nav nav-pills nav-stacked">
+      <li class="navigation-header">
+        {{#if enableLinks}}
+          <a {{translateAttr href="topnav.logo.href"}} class="ambari-logo">
+            <img src="/img/ambari-logo.png" alt="Apache Ambari" title="Apache Ambari">
+          </a>
+        {{else}}
+          <a class="ambari-logo">
+            <img src="/img/ambari-logo.png" alt="Apache Ambari" title="Apache Ambari">
+          </a>
+        {{/if}}
+        <div class="btn-group">
+          <div class="dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
+            <span class="ambari-header" title="Apache Ambari">{{t app.name}}</span>
+            <span class="toggle-icon glyphicon glyphicon-triangle-bottom"></span>
+          </div>
+          <ul class="dropdown-menu">
+            {{#if view.views.length}}
+              {{#each item in view.views}}
+                <li><a class="" href="#" {{action "setView" item target="App.router.mainViewsController"}}>{{item.label}}</a></li>
+              {{/each}}
+            {{else}}
+              <li class="disabled"><a href="javascript:void(null);">{{t menu.item.views.noViews}}</a></li>
+            {{/if}}
+          </ul>
         </div>
-        <ul class="dropdown-menu">
-          {{#if view.views.length}}
-            {{#each item in view.views}}
-              <li><a class="" href="#" {{action "setView" item target="App.router.mainViewsController"}}>{{item.label}}</a></li>
-            {{/each}}
-          {{else}}
-            <li class="disabled"><a href="javascript:void(null);">{{t menu.item.views.noViews}}</a></li>
-          {{/if}}
-        </ul>
-      </div>
-    </li>
-  </ul>
-  {{view App.MainSideMenuView}}
-  <ul class="side-nav-footer nav nav-pills nav-stacked">
-    <li class="navigation-footer">
-      <a href="#" data-toggle="collapse-side-nav">
-        <span class="navigation-icon icon-double-angle-left"></span>
-      </a>
-    </li>
-  </ul>
+      </li>
+    </ul>
+    {{view App.MainSideMenuView}}
+    <ul class="side-nav-footer nav nav-pills nav-stacked">
+      <li class="navigation-footer">
+        <a href="#" data-toggle="collapse-side-nav">
+          <span class="navigation-icon icon-double-angle-left"></span>
+        </a>
+      </li>
+    </ul>
+    </div>
   </div>
-</div>
 {{/if}}
 
 <div id="main">

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7e9ef21/ambari-web/vendor/scripts/theme/bootstrap-ambari.js
----------------------------------------------------------------------
diff --git a/ambari-web/vendor/scripts/theme/bootstrap-ambari.js b/ambari-web/vendor/scripts/theme/bootstrap-ambari.js
index eea87c4..77973a9 100644
--- a/ambari-web/vendor/scripts/theme/bootstrap-ambari.js
+++ b/ambari-web/vendor/scripts/theme/bootstrap-ambari.js
@@ -35,6 +35,15 @@
         $(this).addClass('navigation-bar-fit-height');
       }
 
+      //set main content left margin based on the width of side-nav
+      var containerWidth = $navigationContainer.width();
+      if (settings.moveLeftContent) {
+        $(settings.content).css('margin-left', containerWidth);
+      }
+      if (settings.moveLeftFooter) {
+        $(settings.footer).css('margin-left', containerWidth);
+      }
+
       function popStateHandler() {
         var path = window.location.pathname + window.location.hash;
         $navigationContainer.find('li a').each(function (index, link) {
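
The two settings read above, moveLeftContent and moveLeftFooter, shift the selected content and footer elements by the measured side-nav width instead of relying on the fixed CSS margin removed from application.less earlier in this commit. Purely as an illustrative sketch, not part of the patch: the call below shows how a page might opt in, assuming the theme exposes a jQuery plugin (named navigationBar here for illustration); only the four option names come from the hunk above.

    // Illustrative sketch; the plugin name and selectors are assumptions, while
    // content/footer/moveLeftContent/moveLeftFooter are the settings the new code reads.
    $('.navigation-bar').navigationBar({
      content: '#main',        // element whose margin-left should track the side-nav width
      footer: 'footer',        // same treatment for the page footer
      moveLeftContent: true,
      moveLeftFooter: true
    });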


[30/50] ambari git commit: AMBARI-20093. Cannot edit join node name in Workflow Manager. (Belliraj HB via gauravn7)

Posted by nc...@apache.org.
AMBARI-20093. Cannot edit join node name in Workflow Manager. (Belliraj HB via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5186db06
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5186db06
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5186db06

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 5186db06659916148229fbd02aa742b7caeaa2f9
Parents: ba470c1
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 23 11:18:44 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 23 11:19:36 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/domain/cytoscape-flow-renderer.js  | 2 +-
 .../src/main/resources/ui/app/domain/cytoscape-style.js      | 8 ++++++++
 .../resources/ui/app/templates/components/flow-designer.hbs  | 2 +-
 3 files changed, 10 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5186db06/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
index 51e0461..75ceecb 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
@@ -19,7 +19,7 @@ import Ember from 'ember';
 import CytoscapeStyles from '../domain/cytoscape-style';
 var CytoscapeRenderer= Ember.Object.extend({
   currentCyNode: null,
-  staticNodes: ['start', 'end', 'join', 'placeholder'],
+  staticNodes: ['start', 'end', 'placeholder'],
   dataNodes: [],
   cyOverflow: {},
   cy: null,

http://git-wip-us.apache.org/repos/asf/ambari/blob/5186db06/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js
index 2eb01d1..e05b782 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js
@@ -18,6 +18,7 @@
 import Ember from 'ember';
 var defaultNodeColor = '#fff';
 var actionNodeColor = '#f5f5f5';
+var killNodeColor = '#d43f3a';
 var labelFunction=function(target) {
   if (!target.data().node.name) {
     return "";
@@ -114,6 +115,13 @@ export default Ember.Object.create({
             return "none";
           }
         },
+        'color': function(target) {
+          if (!target.data().transition || !target.data().transition.isOnError()) {
+            return "black";
+          } else {
+            return killNodeColor;
+          }
+        },
         width: 1,
         'font-size': 12,
         label: function(target) {
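
For readers skimming the hunk, the effect of the new 'color' rule can be exercised in isolation. This is an illustration only, not part of the commit; the target.data().transition shape and isOnError() come from the code above, while the stub objects below are made up.

    var killNodeColor = '#d43f3a';

    function edgeLabelColor(target) {
      var transition = target.data().transition;
      // Plain transitions keep black labels; error ("kill") transitions are highlighted.
      if (!transition || !transition.isOnError()) {
        return 'black';
      }
      return killNodeColor;
    }

    edgeLabelColor({ data: function () { return {}; } });
    // => 'black'
    edgeLabelColor({ data: function () { return { transition: { isOnError: function () { return true; } } }; } });
    // => '#d43f3a'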

http://git-wip-us.apache.org/repos/asf/ambari/blob/5186db06/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index 0356e9c..1a73421 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -197,7 +197,7 @@
         <div id="cyRenderer">
           <div id="{{cyId}}" class="cy-panel"></div>
           {{#each dataNodes as |dataNode|}}
-            {{#if (or (eq dataNode.data.type 'action') (eq dataNode.data.type 'decision') (eq dataNode.data.type 'fork'))}}
+            {{#if (or (eq dataNode.data.type 'action') (eq dataNode.data.type 'decision') (eq dataNode.data.type 'fork')  (eq dataNode.data.type 'join'))}}
               <div id="{{dataNode.data.id}}" class="">
                 {{input required pattern="([a-zA-Z_]([\-_a-zA-Z0-9])*){1,39}" name="actionName" data-toggle="tooltip" title=dataNode.name classBinding="dataNode.data.node.errors:error:editable" class="editableNode overlay_node_editor" value=dataNode.dataNodeName placeholder="Action Name"}}
               </div>


[18/50] ambari git commit: AMBARI-20112. Add polyfills for Array.includes and String.includes (onechiporenko)

Posted by nc...@apache.org.
AMBARI-20112. Add polyfills for Array.includes and String.includes (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/813841f8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/813841f8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/813841f8

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 813841f871525a5e78a4fd551ab77ddb8c67e5b2
Parents: f080bd6
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Wed Feb 22 13:31:19 2017 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Wed Feb 22 13:44:32 2017 +0200

----------------------------------------------------------------------
 .../app/styles/theme/bootstrap-ambari.css       | 17 ++++++++
 ambari-web/test/init_test.js                    | 42 ++++++++++++++++++++
 2 files changed, 59 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/813841f8/ambari-web/app/styles/theme/bootstrap-ambari.css
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/theme/bootstrap-ambari.css b/ambari-web/app/styles/theme/bootstrap-ambari.css
index af78310..2c84f88 100644
--- a/ambari-web/app/styles/theme/bootstrap-ambari.css
+++ b/ambari-web/app/styles/theme/bootstrap-ambari.css
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 @font-face {
   font-family: 'Roboto';
   font-weight: normal;

http://git-wip-us.apache.org/repos/asf/ambari/blob/813841f8/ambari-web/test/init_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/init_test.js b/ambari-web/test/init_test.js
index df64cbe..cc97174 100644
--- a/ambari-web/test/init_test.js
+++ b/ambari-web/test/init_test.js
@@ -49,6 +49,48 @@ if (!Function.prototype.bind) {
   };
 }
 
+if (!String.prototype.includes) {
+  String.prototype.includes = function (search, start) {
+    'use strict';
+    var _start = start;
+    if (typeof start !== 'number') {
+      _start = 0;
+    }
+
+    if (_start + search.length > this.length) {
+      return false;
+    }
+    return this.indexOf(search, _start) !== -1;
+  };
+}
+
+if (!Array.prototype.includes) {
+  Object.defineProperty(Array.prototype, 'includes', {
+    value: function (searchElement, fromIndex) {
+
+      if (!this) {
+        throw new TypeError('"this" is null or not defined');
+      }
+
+      var o = Object(this);
+      var len = o.length >>> 0;
+      if (len === 0) {
+        return false;
+      }
+      var n = fromIndex | 0;
+      var k = Math.max(n >= 0 ? n : len - Math.abs(n), 0);
+
+      while (k < len) {
+        if (o[k] === searchElement) {
+          return true;
+        }
+        k++;
+      }
+      return false;
+    }
+  });
+}
+
 Number.isFinite = Number.isFinite || function(value) {
   return typeof value === 'number' && isFinite(value);
 };
\ No newline at end of file
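
Both shims only install themselves when the browser lacks the native methods, so test code can rely on the standard calls either way. A few illustrative calls, not taken from the patch:

    'ambari-web'.includes('web');      // true
    'ambari-web'.includes('web', 8);   // false, the search starts past the match at index 7
    [1, 2, 3].includes(2);             // true
    [1, 2, NaN].includes(NaN);         // false with this shim (it compares with ===); native
                                       // includes uses SameValueZero and would return true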


[06/50] ambari git commit: Merge Side-Navigation-feature-branch 1

Posted by nc...@apache.org.
Merge Side-Navigation-feature-branch 1


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4ff93b0a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4ff93b0a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4ff93b0a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 4ff93b0a291745ce18d36a73b1698fdea55539a3
Parents: b986a3c 65d9017
Author: Xi Wang <xi...@apache.org>
Authored: Tue Feb 21 16:55:36 2017 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Feb 21 16:55:36 2017 -0800

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/index.html  |   2 +-
 ambari-web/app/assets/data/clusters/info.json   |  11 +
 ambari-web/app/assets/img/ambari-Logo.png       | Bin 0 -> 2779 bytes
 ambari-web/app/assets/img/logo-white.png        | Bin 4538 -> 0 bytes
 ambari-web/app/config.js                        |   6 +-
 .../main/alert_definitions_controller.js        |  14 +-
 .../main/alerts/alert_instances_controller.js   | 151 -----
 ambari-web/app/messages.js                      |   5 +
 ambari-web/app/routes/main.js                   |   4 +
 ambari-web/app/styles/application.less          |  14 +-
 ambari-web/app/styles/common.less               |   7 +-
 .../app/styles/theme/bootstrap-ambari.css       | 634 ++++++++++++++++++-
 ambari-web/app/styles/top-nav.less              | 204 ++----
 ambari-web/app/templates/application.hbs        | 226 ++++---
 .../main/alerts/alert_notifications_popup.hbs   |  69 ++
 ambari-web/app/templates/main/dashboard.hbs     |  26 +-
 ambari-web/app/templates/main/menu.hbs          |  23 -
 ambari-web/app/templates/main/menu_item.hbs     |  50 --
 ambari-web/app/templates/main/service.hbs       |   6 +-
 .../main/service/all_services_actions.hbs       | 100 ++-
 .../app/templates/main/service/menu_item.hbs    |   4 +-
 .../app/templates/main/side-menu-item.hbs       |  53 ++
 ambari-web/app/views.js                         |   1 +
 ambari-web/app/views/application.js             | 125 +++-
 .../main/alerts/alert_instances_popup_view.js   | 190 ++++++
 ambari-web/app/views/main/menu.js               | 179 ++++--
 .../views/main/service/all_services_actions.js  |   2 +
 ambari-web/app/views/main/service/menu.js       | 104 +--
 ambari-web/brunch-config.js                     |   3 +-
 .../alerts/alert_instances_controller_test.js   |  33 -
 ambari-web/test/views/main/menu_test.js         |   2 +-
 .../vendor/scripts/theme/bootstrap-ambari.js    | 204 ++++++
 32 files changed, 1692 insertions(+), 760 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4ff93b0a/ambari-web/app/messages.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4ff93b0a/ambari-web/app/routes/main.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4ff93b0a/ambari-web/app/styles/application.less
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4ff93b0a/ambari-web/app/views.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4ff93b0a/ambari-web/app/views/main/service/menu.js
----------------------------------------------------------------------


[48/50] ambari git commit: AMBARI-20144 Ambari logo is not displayed. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-20144 Ambari logo is not displayed. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/56eb5a75
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/56eb5a75
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/56eb5a75

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 56eb5a7590499c6e02d2f0688ad85eff47fbcc96
Parents: 307c8ad
Author: ababiichuk <ab...@hortonworks.com>
Authored: Thu Feb 23 18:08:11 2017 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Thu Feb 23 18:08:11 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/assets/img/ambari-Logo.png | Bin 2779 -> 0 bytes
 ambari-web/app/assets/img/ambari-logo.png | Bin 0 -> 2779 bytes
 2 files changed, 0 insertions(+), 0 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/56eb5a75/ambari-web/app/assets/img/ambari-Logo.png
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/img/ambari-Logo.png b/ambari-web/app/assets/img/ambari-Logo.png
deleted file mode 100644
index 07d31ee..0000000
Binary files a/ambari-web/app/assets/img/ambari-Logo.png and /dev/null differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/56eb5a75/ambari-web/app/assets/img/ambari-logo.png
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/img/ambari-logo.png b/ambari-web/app/assets/img/ambari-logo.png
new file mode 100644
index 0000000..07d31ee
Binary files /dev/null and b/ambari-web/app/assets/img/ambari-logo.png differ


[47/50] ambari git commit: AMBARI-20123 Click feedback/disable toggle is missing for transition buttons in Kerberos Wizard - prone to various failures (dbuzhor)

Posted by nc...@apache.org.
AMBARI-20123 Click feedback/disable toggle is missing for transition buttons in Kerberos Wizard - prone to various failures (dbuzhor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/307c8ad2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/307c8ad2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/307c8ad2

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 307c8ad26cdefa08e0dfc9454655c16dc603d1ff
Parents: b68bb74
Author: Denys Buzhor <bd...@hortonworks.com>
Authored: Thu Feb 23 10:52:04 2017 +0200
Committer: Denys Buzhor <bd...@hortonworks.com>
Committed: Thu Feb 23 16:32:16 2017 +0200

----------------------------------------------------------------------
 .../main/admin/kerberos/step1_controller.js     |  2 +-
 .../main/admin/kerberos/step2_controller.js     | 10 ++-
 .../main/admin/kerberos/step4_controller.js     |  2 +
 .../main/admin/kerberos/step5_controller.js     |  4 +
 .../wizard/wizardProgressPageController.js      |  2 +
 ambari-web/app/routes/add_kerberos_routes.js    | 50 ++++++-------
 ambari-web/app/styles/common.less               |  6 ++
 .../app/templates/common/button_progress.hbs    | 29 ++++++++
 ambari-web/app/templates/common/progress.hbs    |  6 +-
 .../app/templates/main/admin/kerberos/step1.hbs |  2 +-
 .../app/templates/main/admin/kerberos/step2.hbs |  5 +-
 .../app/templates/main/admin/kerberos/step3.hbs |  4 +-
 .../app/templates/main/admin/kerberos/step4.hbs |  5 +-
 .../app/templates/main/admin/kerberos/step5.hbs | 13 +++-
 ambari-web/app/views.js                         |  2 +
 .../common/buttons/button_progress_view.js      | 77 ++++++++++++++++++++
 .../app/views/common/buttons/wizard_buttons.js  | 36 +++++++++
 .../admin/kerberos/step1_controller_test.js     |  6 +-
 .../admin/kerberos/step2_controller_test.js     |  2 +-
 19 files changed, 213 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js b/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js
index c0decfc..2e41e3d 100644
--- a/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js
+++ b/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js
@@ -144,7 +144,7 @@ App.KerberosWizardStep1Controller = Em.Controller.extend({
     }
   },
 
-  next: function () {
+  submit: function () {
     if (!this.get('isSubmitDisabled')) {
       App.router.send('next');
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js b/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
index 992fd34..258a384 100644
--- a/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
+++ b/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
@@ -60,16 +60,17 @@ App.KerberosWizardStep2Controller = App.WizardStep7Controller.extend(App.KDCCred
    * Should Back-button be disabled
    * @type {boolean}
    */
-  isBackBtnDisabled: Em.computed.alias('testConnectionInProgress'),
+  isBackBtnDisabled: Em.computed.or('testConnectionInProgress', 'App.router.nextBtnClickInProgress'),
 
   /**
    * Should Next-button be disabled
    * @type {boolean}
    */
   isSubmitDisabled: function () {
-    if (!this.get('stepConfigs.length') || this.get('testConnectionInProgress') || this.get('submitButtonClicked')) return true;
+    if (!this.get('stepConfigs.length') || this.get('testConnectionInProgress')
+      || this.get('submitButtonClicked') || App.get('router.nextBtnClickInProgress')) return true;
     return (!this.get('stepConfigs').filterProperty('showConfig', true).everyProperty('errorCount', 0) || this.get("miscModalVisible"));
-  }.property('stepConfigs.@each.errorCount', 'miscModalVisible', 'submitButtonClicked', 'testConnectionInProgress'),
+  }.property('stepConfigs.@each.errorCount', 'miscModalVisible', 'submitButtonClicked', 'testConnectionInProgress', 'App.router.nextBtnClickInProgress'),
 
   hostNames: Em.computed.alias('App.allHostNames'),
 
@@ -170,8 +171,8 @@ App.KerberosWizardStep2Controller = App.WizardStep7Controller.extend(App.KDCCred
 
   submit: function () {
     var self = this;
-
     if (this.get('isSubmitDisabled')) return false;
+    App.set('router.nextBtnClickInProgress', true);
     this.get('wizardController').deleteKerberosService().always(function () {
       self.configureKerberos();
     });
@@ -183,6 +184,7 @@ App.KerberosWizardStep2Controller = App.WizardStep7Controller.extend(App.KDCCred
     var callback = function () {
       self.createConfigurations().done(function () {
         self.createKerberosAdminSession().done(function () {
+          App.set('router.nextBtnClickInProgress', false);
           App.router.send('next');
         });
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/controllers/main/admin/kerberos/step4_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos/step4_controller.js b/ambari-web/app/controllers/main/admin/kerberos/step4_controller.js
index 8e5abd5..f23814c 100644
--- a/ambari-web/app/controllers/main/admin/kerberos/step4_controller.js
+++ b/ambari-web/app/controllers/main/admin/kerberos/step4_controller.js
@@ -29,6 +29,7 @@ App.KerberosWizardStep4Controller = App.WizardStep7Controller.extend(App.AddSecu
 
   clearStep: function() {
     this.set('isRecommendedLoaded', false);
+    this.set('submitButtonClicked', false);
     this.set('selectedService', null);
     this.set('stepConfigs', []);
   },
@@ -312,6 +313,7 @@ App.KerberosWizardStep4Controller = App.WizardStep7Controller.extend(App.AddSecu
   },
 
   submit: function() {
+    this.set('submitButtonClicked', true);
     this.saveConfigurations();
     App.router.send('next');
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/controllers/main/admin/kerberos/step5_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos/step5_controller.js b/ambari-web/app/controllers/main/admin/kerberos/step5_controller.js
index 9cb75e0..ac36c6c 100644
--- a/ambari-web/app/controllers/main/admin/kerberos/step5_controller.js
+++ b/ambari-web/app/controllers/main/admin/kerberos/step5_controller.js
@@ -48,6 +48,8 @@ App.KerberosWizardStep5Controller = App.KerberosProgressPageController.extend({
     }
   ],
 
+  isCSVRequestInProgress: false,
+
   submit: function() {
     App.router.send('next');
   },
@@ -56,6 +58,7 @@ App.KerberosWizardStep5Controller = App.KerberosProgressPageController.extend({
    * get CSV data from the server
    */
   getCSVData: function (skipDownload) {
+    this.set('isCSVRequestInProgress', true);
     return App.ajax.send({
       name: 'admin.kerberos.cluster.csv',
       sender: this,
@@ -75,6 +78,7 @@ App.KerberosWizardStep5Controller = App.KerberosProgressPageController.extend({
    */
   getCSVDataSuccessCallback: function (data, opt, params) {
     this.set('csvData', this.prepareCSVData(data.split('\n')));
+    this.set('isCSVRequestInProgress', false);
     if (!Em.get(params, 'skipDownload')) {
       fileUtils.downloadTextFile(stringUtils.arrayToCSV(this.get('csvData')), 'csv', 'kerberos.csv');
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/mixins/wizard/wizardProgressPageController.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/wizard/wizardProgressPageController.js b/ambari-web/app/mixins/wizard/wizardProgressPageController.js
index d426eda..96eb0ae 100644
--- a/ambari-web/app/mixins/wizard/wizardProgressPageController.js
+++ b/ambari-web/app/mixins/wizard/wizardProgressPageController.js
@@ -731,6 +731,7 @@ App.wizardProgressPageControllerMixin = Em.Mixin.create(App.InstallComponent, {
 
   done: function () {
     if (!this.get('isSubmitDisabled')) {
+      this.set('isSubmitDisabled', true);
       this.removeObserver('tasks.@each.status', this, 'onTaskStatusChange');
       App.router.send('next');
     }
@@ -738,6 +739,7 @@ App.wizardProgressPageControllerMixin = Em.Mixin.create(App.InstallComponent, {
 
   back: function () {
     if (!this.get('isBackButtonDisabled')) {
+      this.set('isBackButtonDisabled', true);
       this.removeObserver('tasks.@each.status', this, 'onTaskStatusChange');
       App.router.send('back');
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/routes/add_kerberos_routes.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/add_kerberos_routes.js b/ambari-web/app/routes/add_kerberos_routes.js
index fe21dae..462ce44 100644
--- a/ambari-web/app/routes/add_kerberos_routes.js
+++ b/ambari-web/app/routes/add_kerberos_routes.js
@@ -111,7 +111,7 @@ module.exports = App.WizardRoute.extend({
     });
   },
 
-  step1: Em.Route.extend({
+  step1: App.StepRoute.extend({
     route: '/step1',
 
     connectOutlets: function (router) {
@@ -128,7 +128,7 @@ module.exports = App.WizardRoute.extend({
       return false;
     },
 
-    next: function (router) {
+    nextTransition: function (router) {
       var kerberosWizardController = router.get('kerberosWizardController');
       var kerberosStep1controller = router.get('kerberosWizardStep1Controller');
 
@@ -139,7 +139,7 @@ module.exports = App.WizardRoute.extend({
     }
   }),
 
-  step2: Em.Route.extend({
+  step2: App.StepRoute.extend({
     route: '/step2',
 
     connectOutlets: function (router) {
@@ -156,16 +156,14 @@ module.exports = App.WizardRoute.extend({
     unroutePath: function () {
       return false;
     },
-    back: function(router) {
+    backTransition: function(router) {
       var controller = router.get('kerberosWizardStep2Controller');
       var kerberosWizardController = router.get('kerberosWizardController');
-      if (!controller.get('isBackBtnDisabled')) {
-        kerberosWizardController.overrideVisibility(controller.get('configs'), true, []);
-        router.transitionTo('step1');
-      }
+      kerberosWizardController.overrideVisibility(controller.get('configs'), true, []);
+      router.transitionTo('step1');
     },
 
-    next: function (router) {
+    nextTransition: function (router) {
       var kerberosWizardController = router.get('kerberosWizardController');
       var kerberosWizardStep2Controller = router.get('kerberosWizardStep2Controller');
 
@@ -187,7 +185,7 @@ module.exports = App.WizardRoute.extend({
     }
   }),
 
-  step3: Em.Route.extend({
+  step3: App.StepRoute.extend({
     route: '/step3',
 
     connectOutlets: function (router) {
@@ -203,7 +201,7 @@ module.exports = App.WizardRoute.extend({
       return false;
     },
     back: Em.Router.transitionTo('step2'),
-    next: function (router) {
+    nextTransition: function (router) {
       var kerberosWizardController = router.get('kerberosWizardController');
       kerberosWizardController.setDBProperty('kerberosDescriptorConfigs', null);
       kerberosWizardController.clearCachedStepConfigValues(router.get('kerberosWizardStep4Controller'));
@@ -211,7 +209,7 @@ module.exports = App.WizardRoute.extend({
     }
   }),
 
-  step4: Em.Route.extend({
+  step4: App.StepRoute.extend({
     route: '/step4',
 
     connectOutlets: function (router) {
@@ -230,14 +228,14 @@ module.exports = App.WizardRoute.extend({
     unroutePath: function () {
       return false;
     },
-    back: function (router) {
+    backTransition: function (router) {
       if (router.get('kerberosWizardController.skipClientInstall')) {
         router.transitionTo('step2');
       } else {
         router.transitionTo('step3');
       }
     },
-    next: function (router) {
+    nextTransition: function (router) {
       var kerberosWizardController = router.get('kerberosWizardController');
       var step5Controller = router.get('kerberosWizardStep5Controller');
       var kerberosDescriptor = kerberosWizardController.get('kerberosDescriptorConfigs');
@@ -252,7 +250,7 @@ module.exports = App.WizardRoute.extend({
     }
   }),
 
-  step5: Em.Route.extend({
+  step5: App.StepRoute.extend({
     route: '/step5',
 
     connectOutlets: function (router) {
@@ -280,9 +278,9 @@ module.exports = App.WizardRoute.extend({
       kerberosWizardStep5Controller.getCSVData();
     },
 
-    back: Em.Router.transitionTo('step4'),
+    backTransition: Em.Router.transitionTo('step4'),
 
-    next: function (router) {
+    nextTransition: function (router) {
       var kerberosWizardController = router.get('kerberosWizardController');
       kerberosWizardController.setDBProperties({
         tasksStatuses: null,
@@ -292,7 +290,7 @@ module.exports = App.WizardRoute.extend({
     }
   }),
 
-  step6: Em.Route.extend({
+  step6: App.StepRoute.extend({
     route: '/step6',
 
     connectOutlets: function (router) {
@@ -308,8 +306,8 @@ module.exports = App.WizardRoute.extend({
     unroutePath: function () {
       return false;
     },
-    back: Em.Router.transitionTo('step4'),
-    next: function (router) {
+    backTransition: Em.Router.transitionTo('step4'),
+    nextTransition: function (router) {
       var kerberosWizardController = router.get('kerberosWizardController');
       kerberosWizardController.setDBProperties({
         tasksStatuses: null,
@@ -319,7 +317,7 @@ module.exports = App.WizardRoute.extend({
     }
   }),
 
-  step7: Em.Route.extend({
+  step7: App.StepRoute.extend({
     route: '/step7',
 
     connectOutlets: function (router) {
@@ -337,13 +335,13 @@ module.exports = App.WizardRoute.extend({
     unroutePath: function () {
       return false;
     },
-    back: Em.Router.transitionTo('step4'),
-    next: function (router) {
+    backTransition: Em.Router.transitionTo('step4'),
+    nextTransition: function (router) {
       router.transitionTo('step8');
     }
   }),
 
-  step8: Em.Route.extend({
+  step8: App.StepRoute.extend({
     route: '/step8',
 
     connectOutlets: function (router) {
@@ -359,8 +357,8 @@ module.exports = App.WizardRoute.extend({
     unroutePath: function () {
       return false;
     },
-    back: Em.Router.transitionTo('step7'),
-    next: function (router) {
+    backTransition: Em.Router.transitionTo('step7'),
+    nextTransition: function (router) {
       var controller = router.get('kerberosWizardController');
       controller.resetOnClose(controller, 'adminKerberos.index');
     }
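
The step routes above now extend App.StepRoute and mostly implement backTransition/nextTransition instead of handling back/next directly. App.StepRoute itself is not included in this digest; purely as a reading aid, here is a hypothetical sketch of the kind of wrapper this implies, assuming it guards against double clicks with the router's backBtnClickInProgress/nextBtnClickInProgress flags (both referenced elsewhere in this commit) before delegating to the backTransition/nextTransition hooks.

    // Hypothetical sketch only; the real App.StepRoute is not shown in this patch.
    App.StepRoute = Em.Route.extend({
      back: function (router) {
        if (App.get('router.backBtnClickInProgress')) {
          return; // ignore repeated clicks while a transition is in flight
        }
        App.set('router.backBtnClickInProgress', true);
        if (this.backTransition) {
          this.backTransition(router);
        }
      },
      next: function (router) {
        if (App.get('router.nextBtnClickInProgress')) {
          return;
        }
        App.set('router.nextBtnClickInProgress', true);
        if (this.nextTransition) {
          this.nextTransition(router);
        }
      }
    });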

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/styles/common.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/common.less b/ambari-web/app/styles/common.less
index 94a5cd3..ad1da66 100644
--- a/ambari-web/app/styles/common.less
+++ b/ambari-web/app/styles/common.less
@@ -198,3 +198,9 @@
 @modal-header-height: 50px;
 // modal footer height
 @modal-footer-height: 60px;
+
+.btn-primary[disabled] {
+  .icon-spinner {
+    color: white;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/templates/common/button_progress.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/button_progress.hbs b/ambari-web/app/templates/common/button_progress.hbs
new file mode 100644
index 0000000..03f2cbb
--- /dev/null
+++ b/ambari-web/app/templates/common/button_progress.hbs
@@ -0,0 +1,29 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<a {{bindAttr id="view.buttonId" class=":btn view.buttonClassNames" disabled="view.isDisabled"}} {{action handleClick target="view"}}>
+  {{#if view.doSpinRight}}
+    {{yield}}
+  {{/if}}
+  {{#if view.isInProgress}}
+    {{view App.SpinnerView tagName="span" classNames="service-button-spinner"}}
+  {{/if}}
+  {{#unless view.doSpinRight}}
+    {{yield}}
+  {{/unless}}
+</a>

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/templates/common/progress.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/progress.hbs b/ambari-web/app/templates/common/progress.hbs
index fce5641..ab59328 100644
--- a/ambari-web/app/templates/common/progress.hbs
+++ b/ambari-web/app/templates/common/progress.hbs
@@ -79,9 +79,11 @@
   <div class="wizard-footer col-md-12">
     <div class="btn-area">
       {{#if view.showBackButton}}
-        <button class="btn btn-default pull-left" {{bindAttr disabled="controller.isBackButtonDisabled"}} {{action back target="controller"}}>&larr; {{t common.back}}</button>
+        {{view App.WizardBackButton action="back" target="controller"}}
       {{/if}}
-      <button class="btn btn-success pull-right" {{bindAttr disabled="controller.isSubmitDisabled"}} {{action done target="controller"}}>{{{view.submitButtonText}}}</button>
+      {{#view App.WizardNextButton action="done" target="controller"}}
+        {{{view.parentView.submitButtonText}}}
+      {{/view}}
     </div>
   </div>
 {{/unless}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/templates/main/admin/kerberos/step1.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/kerberos/step1.hbs b/ambari-web/app/templates/main/admin/kerberos/step1.hbs
index 19e5596..57d0637 100644
--- a/ambari-web/app/templates/main/admin/kerberos/step1.hbs
+++ b/ambari-web/app/templates/main/admin/kerberos/step1.hbs
@@ -57,6 +57,6 @@
 </div>
 <div class="wizard-footer col-md-12">
   <div class="btn-area">
-    <button id="submit-kerberos-step1" class="btn btn-success pull-right" {{bindAttr disabled="isSubmitDisabled"}} {{action "next" target="controller"}}>{{t common.next}} &rarr;</button>
+    {{view App.WizardNextButton buttonId="submit-kerberos-step1" action="submit" target="controller"}}
   </div>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/templates/main/admin/kerberos/step2.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/kerberos/step2.hbs b/ambari-web/app/templates/main/admin/kerberos/step2.hbs
index c17347c..942bd5e 100644
--- a/ambari-web/app/templates/main/admin/kerberos/step2.hbs
+++ b/ambari-web/app/templates/main/admin/kerberos/step2.hbs
@@ -34,9 +34,8 @@
   </div>
   <div class="wizard-footer col-md-12">
     <div class="btn-area">
-      <button id="back-kerberos-step2" class="btn btn-default" {{bindAttr disabled="isBackBtnDisabled"}} {{action back}}>&larr; {{t common.back}}</button>
-      <button id="submit-kerberos-step2" class="btn btn-success pull-right" {{bindAttr disabled="isSubmitDisabled"}}
-        {{action submit target="controller"}}>{{t common.next}} &rarr;</button>
+      {{view App.WizardBackButton buttonId="back-kerberos-step2" action="back" disabledBinding="controller.isBackBtnDisabled"}}
+      {{view App.WizardNextButton buttonId="submit-kerberos-step2" action="submit" target="controller"}}
     </div>
   </div>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/templates/main/admin/kerberos/step3.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/kerberos/step3.hbs b/ambari-web/app/templates/main/admin/kerberos/step3.hbs
index 6e2ea57..01c927d 100644
--- a/ambari-web/app/templates/main/admin/kerberos/step3.hbs
+++ b/ambari-web/app/templates/main/admin/kerberos/step3.hbs
@@ -92,9 +92,9 @@
   <div class="wizard-footer col-md-12">
     <div class="btn-area">
       {{#if view.showBackButton}}
-        <button class="btn btn-default pull-left" {{bindAttr disabled="controller.isBackButtonDisabled"}} {{action back target="controller"}}>&larr; {{t common.back}}</button>
+        {{view App.WizardBackButton action="back" target="controller"}}
       {{/if}}
-      <button class="btn btn-success pull-right" {{bindAttr disabled="controller.isSubmitDisabled"}} {{action done target="controller"}}>{{{view.submitButtonText}}}</button>
+      {{view App.WizardNextButton buttonId="submit-kerberos-step3" action="done" target="controller"}}
     </div>
   </div>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/templates/main/admin/kerberos/step4.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/kerberos/step4.hbs b/ambari-web/app/templates/main/admin/kerberos/step4.hbs
index 1e6f956..24f3576 100644
--- a/ambari-web/app/templates/main/admin/kerberos/step4.hbs
+++ b/ambari-web/app/templates/main/admin/kerberos/step4.hbs
@@ -27,9 +27,8 @@
   </div>
   <div class="wizard-footer col-md-12">
     <div class="btn-area">
-      <button id="back-kerberos-step4" class="btn btn-default" {{action back}}>&larr; {{t common.back}}</button>
-      <button id="submit-kerberos-step4" class="btn btn-success pull-right" {{bindAttr disabled="isSubmitDisabled"}}
-        {{action submit target="controller"}}>{{t common.next}} &rarr;</button>
+      {{view App.WizardBackButton buttonId="back-kerberos-step4" action="back"}}
+      {{view App.WizardNextButton buttonId="submit-kerberos-step4" action="submit" target="controller"}}
     </div>
   </div>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/templates/main/admin/kerberos/step5.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/kerberos/step5.hbs b/ambari-web/app/templates/main/admin/kerberos/step5.hbs
index c0cdc00..0f55d45 100644
--- a/ambari-web/app/templates/main/admin/kerberos/step5.hbs
+++ b/ambari-web/app/templates/main/admin/kerberos/step5.hbs
@@ -45,7 +45,13 @@
         </div>
 
         <div class="additional btn-area clearfix">
-          <button class="btn btn-primary pull-right mls" {{action downloadCSV}}>{{t admin.kerberos.wizard.step5.downloadCSV}}</button>
+          {{#view App.ButtonProgressView
+             classNames="pull-right mls"
+             buttonClassNames="btn-primary"
+             action="downloadCSV"
+             isInProgressBinding="controller.isCSVRequestInProgress"}}
+            {{t admin.kerberos.wizard.step5.downloadCSV}}
+          {{/view}}
           <button class="btn btn-default pull-right" {{action exitWizard}}>{{t admin.kerberos.wizard.step5.exitWizard}}</button>
         </div>
       </div>
@@ -53,9 +59,8 @@
   </div>
   <div class="wizard-footer col-md-12">
     <div class="btn-area">
-      <button id="back-kerberos-step5" class="btn btn-default" {{action back}}>&larr; {{t common.back}}</button>
-      <button id="submit-kerberos-step5" class="btn btn-success pull-right" {{bindAttr disabled="isSubmitDisabled"}}
-        {{action submit target="controller"}}>{{t common.next}} &rarr;</button>
+      {{view App.WizardBackButton buttonId="back-kerberos-step5" action="back"}}
+      {{view App.WizardNextButton buttonId="submit-kerberos-step5" action="submit" target="controller"}}
     </div>
   </div>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/views.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views.js b/ambari-web/app/views.js
index 9addd91..7ec59f7 100644
--- a/ambari-web/app/views.js
+++ b/ambari-web/app/views.js
@@ -27,6 +27,8 @@ require('views/common/log_file_search_view');
 require('views/common/log_tail_view');
 require('views/common/global/spinner');
 require('views/common/ajax_default_error_popup_body');
+require('views/common/buttons/button_progress_view');
+require('views/common/buttons/wizard_buttons');
 require('views/common/chart');
 require('views/common/chart/pie');
 require('views/common/chart/linear');

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/views/common/buttons/button_progress_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/buttons/button_progress_view.js b/ambari-web/app/views/common/buttons/button_progress_view.js
new file mode 100644
index 0000000..cae2664
--- /dev/null
+++ b/ambari-web/app/views/common/buttons/button_progress_view.js
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+
+App.ButtonProgressView = Em.View.extend(Em.TargetActionSupport, {
+  layoutName: require('templates/common/button_progress'),
+  isDisabled: Em.computed.or('disabled', 'isInProgress'),
+  /**
+   * Target to perform `onClick` function default to App.router
+   * @type {Em.Object|Em.View|Em.Controller}
+   */
+  target: null,
+  /**
+   * Property determines progress state
+   * @type {Boolean}
+   */
+  isInProgress: null,
+  /**
+   * on click handler
+   * @type {Function}
+   */
+  action: null,
+  /**
+   * When true spinner appears to right side, when false - to left
+   * @type {Boolean}
+   */
+  doSpinRight: true,
+
+  targetObject: function() {
+    var target = this.get('target'),
+        splitted;
+    if (!target) {
+      return this.get('controller.target');
+    } else if (typeof target === 'string') {
+      splitted = target.split('.');
+      if (splitted[0] === 'view') {
+        splitted = ['parentView'].concat(splitted.slice(1));
+      }
+      return Em.get(this, splitted.join('.'));
+    } else {
+      return target;
+    }
+  }.property('target'),
+
+  handleClick: function() {
+    if (this.get('isDisabled')) {
+      return;
+    }
+    var target = this.get('targetObject');
+    var targetMethod = this.get('action');
+    if (target.isState && typeof target.send === 'function') {
+      target.send(targetMethod);
+    } else if (targetMethod && typeof targetMethod === 'function') {
+      targetMethod.apply(target);
+    } else if (typeof targetMethod === 'string' && typeof Em.get(target, targetMethod) === 'function') {
+      Em.get(target, targetMethod).call(target);
+    } else {
+      Ember.Logger.error('Cannot invoke action %s on target %s', targetMethod, target);
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/app/views/common/buttons/wizard_buttons.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/buttons/wizard_buttons.js b/ambari-web/app/views/common/buttons/wizard_buttons.js
new file mode 100644
index 0000000..3bf89f2
--- /dev/null
+++ b/ambari-web/app/views/common/buttons/wizard_buttons.js
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+
+App.WizardNextButton = App.ButtonProgressView.extend({
+  classNames: ['pull-right'],
+  isInProgressBinding: 'App.router.nextBtnClickInProgress',
+  buttonClassNames: ['btn-success'],
+  template: Em.Handlebars.compile('{{t common.next}} &rarr;'),
+  disabledBinding: 'controller.isSubmitDisabled'
+});
+
+App.WizardBackButton = App.ButtonProgressView.extend({
+  classNames: ['pull-left'],
+  isInProgressBinding: 'App.router.backBtnClickInProgress',
+  buttonClassNames: ['btn-default'],
+  template: Em.Handlebars.compile('&larr; {{t common.back}}'),
+  doSpinRight: false,
+  disabledBinding: 'controller.isBackButtonDisabled'
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/test/controllers/main/admin/kerberos/step1_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/kerberos/step1_controller_test.js b/ambari-web/test/controllers/main/admin/kerberos/step1_controller_test.js
index 3181fda..ca80341 100644
--- a/ambari-web/test/controllers/main/admin/kerberos/step1_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/kerberos/step1_controller_test.js
@@ -70,7 +70,7 @@ describe('App.KerberosWizardStep1Controller', function() {
     });
   });
 
-  describe("#next()", function () {
+  describe("#submit()", function () {
 
     beforeEach(function() {
       sinon.stub(App.router, 'send');
@@ -84,7 +84,7 @@ describe('App.KerberosWizardStep1Controller', function() {
       controller.reopen({
         'isSubmitDisabled': false
       });
-      controller.next();
+      controller.submit();
       expect(App.router.send.calledOnce).to.be.true;
     });
 
@@ -92,7 +92,7 @@ describe('App.KerberosWizardStep1Controller', function() {
       controller.reopen({
         'isSubmitDisabled': true
       });
-      controller.next();
+      controller.submit();
       expect(App.router.send.called).to.be.false;
     });
   });

http://git-wip-us.apache.org/repos/asf/ambari/blob/307c8ad2/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js b/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js
index 2e603da..78e43f0 100644
--- a/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js
@@ -37,7 +37,7 @@ describe('App.KerberosWizardStep2Controller', function() {
     controller = getController();
   });
 
-  App.TestAliases.testAsComputedAlias(getController(), 'isBackBtnDisabled', 'testConnectionInProgress', 'boolean');
+  App.TestAliases.testAsComputedOr(getController(), 'isBackBtnDisabled', ['testConnectionInProgress', 'App.router.nextBtnClickInProgress'], 'boolean');
 
   App.TestAliases.testAsComputedAlias(getController(), 'hostNames', 'App.allHostNames', 'array');
 


[50/50] ambari git commit: Merge branch 'trunk' into branch-feature-AMBARI-12556

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-12556


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/353a076c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/353a076c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/353a076c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 353a076c8bf65a911f29b1f908e60d011459ffe0
Parents: 0c3478b cb030a4
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Feb 24 09:18:46 2017 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Feb 24 09:18:46 2017 -0500

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/index.html  |   2 +-
 ambari-agent/etc/init.d/ambari-agent            |   1 +
 .../org/apache/ambari/logfeeder/LogFeeder.java  |  13 +-
 .../ambari/logfeeder/common/ConfigBlock.java    |   2 +-
 .../apache/ambari/logfeeder/filter/Filter.java  |   4 +-
 .../ambari/logfeeder/input/InputSimulate.java   |  31 +-
 .../handler/UploadConfigurationHandler.java     |  65 +-
 ambari-metrics/ambari-metrics-assembly/pom.xml  |  12 +
 .../src/main/assembly/collector-windows.xml     |   1 +
 .../src/main/assembly/collector.xml             |   1 +
 .../timeline/AbstractTimelineMetricsSink.java   |  22 +-
 .../timeline/HadoopTimelineMetricsSink.java     |  11 +-
 .../ambari-metrics-kafka-sink/pom.xml           |   7 +-
 .../kafka/KafkaTimelineMetricsReporter.java     |   9 +-
 .../conf/unix/amshbase_metrics_whitelist        | 162 +++++
 .../conf/windows/amshbase_metrics_whitelist     | 162 +++++
 .../ambari-metrics-timelineservice/pom.xml      |   5 +
 .../timeline/HBaseTimelineMetricStore.java      |   2 +-
 .../timeline/TimelineMetricConfiguration.java   |  39 ++
 .../metrics/timeline/TimelineMetricsFilter.java |  45 +-
 .../timeline/TimelineMetricsFilterTest.java     |  59 +-
 .../timeline/discovery/TestMetadataManager.java |  15 +-
 ambari-server/sbin/ambari-server                |   2 +-
 .../checks/DatabaseConsistencyCheckHelper.java  |  79 +--
 .../internal/ServiceResourceProvider.java       |   2 +-
 .../internal/UpgradeResourceProvider.java       |  25 +-
 .../controller/utilities/DatabaseChecker.java   | 100 +--
 .../ambari/server/orm/dao/ClusterDAO.java       | 136 +---
 .../orm/entities/ClusterConfigEntity.java       | 125 ++--
 .../entities/ClusterConfigMappingEntity.java    | 207 -------
 .../entities/ClusterConfigMappingEntityPK.java  |  83 ---
 .../server/orm/entities/ClusterEntity.java      |  13 +-
 .../orm/entities/ServiceConfigEntity.java       |   2 +-
 .../orm/helpers/dbms/GenericDbmsHelper.java     |   2 +-
 .../kerberos/FinalizeKerberosServerAction.java  |  14 +-
 .../kerberos/KerberosServerAction.java          |   2 +-
 .../ambari/server/state/ConfigHelper.java       |  80 ++-
 .../ambari/server/state/DesiredConfig.java      |  20 -
 .../ambari/server/state/RepositoryInfo.java     |  20 +-
 .../apache/ambari/server/state/ServiceImpl.java |  33 +-
 .../server/state/cluster/ClusterImpl.java       | 238 ++-----
 .../ambari/server/state/host/HostImpl.java      |  13 +-
 .../stack/upgrade/RepositoryVersionHelper.java  |   2 +
 .../ambari/server/topology/TopologyManager.java |   3 +
 .../server/upgrade/UpgradeCatalog300.java       |  92 ++-
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |  12 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |  12 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |  12 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |  12 +-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |  12 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |  12 +-
 .../HDP/grafana-logsearch-home.json             | 600 ++++++++++++++++++
 .../package/scripts/metadata_server.py          |   5 +-
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   |   1 +
 .../DRUID/0.9.2/package/scripts/params.py       |   2 +-
 .../DRUID/0.9.2/package/scripts/superset.py     |   2 +-
 .../KAFKA/0.8.1/package/scripts/kafka_broker.py |  15 +-
 .../KAFKA/0.8.1/package/scripts/params.py       |   2 +
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |   9 +
 .../0.5.0/package/scripts/setup_logfeeder.py    |  43 +-
 .../0.5.0/package/scripts/setup_logsearch.py    |  35 +-
 .../0.4.0/package/scripts/setup_ranger_xml.py   |  13 +-
 .../common-services/YARN/2.1.0.2.0/metainfo.xml |   1 +
 .../common-services/YARN/3.0.0.3.0/metainfo.xml |   1 +
 .../src/main/resources/scripts/configs.sh       |   8 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   1 +
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |  57 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |  17 -
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml |  17 -
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |  48 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |  15 -
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     |  14 -
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |  19 +-
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |  57 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml |  16 -
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |  48 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     |  14 -
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |  22 +-
 .../HIVE/configuration/hive-interactive-env.xml |   2 +-
 .../configuration/hive-interactive-site.xml     |   6 -
 .../stacks/HDP/2.5/services/stack_advisor.py    |   8 +
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  |  40 ++
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml |  32 +-
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |   8 +
 .../HIVE/configuration/hive-interactive-env.xml |   2 +-
 .../configuration/hive-interactive-site.xml     |   7 +
 .../HIVE/configuration/tez-interactive-site.xml |   6 +-
 .../src/main/resources/stacks/stack_advisor.py  |  14 +-
 .../DatabaseConsistencyCheckHelperTest.java     |  56 +-
 .../server/orm/dao/ServiceConfigDAOTest.java    | 324 +++++-----
 .../FinalizeKerberosServerActionTest.java       | 206 ++++++
 .../ambari/server/state/DesiredConfigTest.java  |   2 -
 .../server/state/cluster/ClusterTest.java       | 281 ++++-----
 .../server/state/cluster/ClustersTest.java      |   7 +-
 .../ambari/server/state/host/HostTest.java      |  10 +-
 .../upgrade/RepositoryVersionHelperTest.java    |   3 +-
 .../server/upgrade/UpgradeCatalog300Test.java   | 167 ++++-
 .../stacks/2.2/common/test_stack_advisor.py     |  20 +-
 .../stacks/2.4/LOGSEARCH/test_logfeeder.py      |   6 +
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |   6 +
 .../test/python/stacks/2.4/configs/default.json |   1 +
 .../stacks/2.5/common/test_stack_advisor.py     |  21 +-
 .../test/resources/stacks/old_stack_advisor.py  |  12 +-
 ambari-web/app/assets/data/clusters/info.json   |  11 +
 ambari-web/app/assets/img/ambari-logo.png       | Bin 0 -> 2779 bytes
 ambari-web/app/assets/img/logo-white.png        | Bin 4538 -> 0 bytes
 ambari-web/app/config.js                        |   6 +-
 .../main/admin/kerberos/step1_controller.js     |   2 +-
 .../main/admin/kerberos/step2_controller.js     |  10 +-
 .../main/admin/kerberos/step4_controller.js     |   2 +
 .../main/admin/kerberos/step5_controller.js     |   4 +
 .../main/admin/stack_and_upgrade_controller.js  |   1 -
 .../main/alert_definitions_controller.js        |  14 +-
 .../main/alerts/alert_instances_controller.js   | 151 -----
 .../controllers/main/service/info/summary.js    |  12 +-
 .../wizard/step7/assign_master_controller.js    |  13 +-
 .../app/controllers/wizard/step9_controller.js  |   7 +-
 ambari-web/app/messages.js                      |   6 +
 .../wizard/wizardProgressPageController.js      |   2 +
 ambari-web/app/routes/add_kerberos_routes.js    |  50 +-
 ambari-web/app/routes/main.js                   |   8 +-
 ambari-web/app/styles/application.less          |  14 +-
 ambari-web/app/styles/common.less               |  13 +-
 .../app/styles/enhanced_service_dashboard.less  |   1 -
 ambari-web/app/styles/modal_popups.less         |  11 +-
 ambari-web/app/styles/stack_versions.less       |   2 +-
 .../app/styles/theme/bootstrap-ambari.css       | 619 ++++++++++++++++++-
 ambari-web/app/styles/top-nav.less              | 204 ++----
 ambari-web/app/styles/wizard.less               |  14 +-
 ambari-web/app/templates/application.hbs        | 226 ++++---
 .../common/assign_master_components.hbs         |  33 +-
 .../app/templates/common/button_progress.hbs    |  29 +
 .../configs/propertyDependence_footer.hbs       |  25 -
 ambari-web/app/templates/common/modal_popup.hbs |  26 +-
 ambari-web/app/templates/common/progress.hbs    |  18 +-
 .../app/templates/main/admin/kerberos/step1.hbs |   2 +-
 .../app/templates/main/admin/kerberos/step2.hbs |   5 +-
 .../app/templates/main/admin/kerberos/step3.hbs |   4 +-
 .../app/templates/main/admin/kerberos/step4.hbs |   5 +-
 .../app/templates/main/admin/kerberos/step5.hbs |  13 +-
 .../main/alerts/alert_notifications_popup.hbs   |  69 +++
 ambari-web/app/templates/main/dashboard.hbs     |  26 +-
 ambari-web/app/templates/main/menu.hbs          |  23 -
 ambari-web/app/templates/main/menu_item.hbs     |  50 --
 ambari-web/app/templates/main/service.hbs       |   6 +-
 .../main/service/all_services_actions.hbs       | 100 ++-
 .../main/service/info/save_popup_footer.hbs     |  13 +-
 .../app/templates/main/service/menu_item.hbs    |   4 +-
 .../app/templates/main/side-menu-item.hbs       |  53 ++
 .../step3/step3_host_warning_popup_footer.hbs   |  14 +-
 ambari-web/app/utils/date/date.js               |  26 +-
 ambari-web/app/utils/ember_reopen.js            |   4 +
 ambari-web/app/views.js                         |   3 +
 ambari-web/app/views/application.js             | 125 +++-
 .../common/assign_master_components_view.js     |   6 +-
 .../common/buttons/button_progress_view.js      |  77 +++
 .../app/views/common/buttons/wizard_buttons.js  |  36 ++
 .../configs/service_configs_by_category_view.js |  15 +-
 .../views/main/admin/kerberos/disable_view.js   |   2 +-
 .../main/alerts/alert_instances_popup_view.js   | 190 ++++++
 .../dashboard/widgets/uptime_text_widget.js     |   9 +-
 ambari-web/app/views/main/menu.js               | 179 ++++--
 .../views/main/service/all_services_actions.js  |   2 +
 ambari-web/app/views/main/service/menu.js       | 104 +---
 .../app/views/main/service/services/yarn.js     |   1 +
 .../views/wizard/step7/assign_master_view.js    |   6 +-
 ambari-web/brunch-config.js                     |   3 +-
 .../admin/kerberos/step1_controller_test.js     |   6 +-
 .../admin/kerberos/step2_controller_test.js     |   2 +-
 .../alerts/alert_instances_controller_test.js   |  33 -
 ambari-web/test/init_test.js                    |  42 ++
 ambari-web/test/mappers/service_mapper_test.js  |   2 +-
 ambari-web/test/utils/date/date_test.js         |  30 +-
 .../stack_upgrade/upgrade_history_view_test.js  |   8 +-
 .../widgets/hbase_master_uptime_test.js         |   4 +-
 .../dashboard/widgets/namenode_uptime_test.js   |   4 +-
 .../widgets/resource_manager_uptime_test.js     |   4 +-
 .../widgets/uptime_text_widget_test.js          |   4 +-
 ambari-web/test/views/main/menu_test.js         |   2 +-
 .../vendor/scripts/theme/bootstrap-ambari.js    | 204 ++++++
 .../stacks/HDF/2.0/services/stack_advisor.py    |   2 +-
 .../ambari/view/commons/hdfs/UploadService.java |   2 +-
 .../resources/ui/app/adapters/application.js    |   6 +
 .../resources/ui/app/components/upload-file.js  |  19 +-
 .../ambari/view/hive20/ConnectionSystem.java    |  27 -
 .../hive20/actor/DatabaseChangeNotifier.java    | 168 -----
 .../view/hive20/actor/DatabaseManager.java      | 313 ----------
 .../ambari/view/hive20/actor/JdbcConnector.java |   5 +-
 .../ambari/view/hive20/actor/LogAggregator.java |   4 +-
 .../view/hive20/actor/MetaDataManager.java      | 137 ----
 .../view/hive20/actor/MetaDataRetriever.java    | 173 ------
 .../view/hive20/actor/TableChangeNotifier.java  |  95 ---
 .../ambari/view/hive20/actor/message/Ping.java  |  50 --
 .../ambari/view/hive20/client/DDLDelegator.java |   7 +-
 .../view/hive20/client/DDLDelegatorImpl.java    |  52 +-
 .../view/hive20/internal/dto/TableInfo.java     |  12 +-
 .../view/hive20/internal/dto/TableResponse.java |   9 -
 .../view/hive20/resources/browser/DDLProxy.java |  67 +-
 .../resources/browser/HiveBrowserService.java   | 259 --------
 .../hive20/resources/system/SystemService.java  |  28 +-
 .../utils/MetaDataManagerEventSubmitter.java    |  43 --
 .../src/main/resources/ui/app/adapters/ping.js  |  35 --
 .../main/resources/ui/app/adapters/ranger.js    |  27 +
 .../resources/ui/app/components/list-filter.js  |   5 +-
 .../ui/app/components/table-statistics.js       |   5 +-
 .../ui/app/components/visual-explain.js         |   2 +
 .../resources/ui/app/configs/result-tabs.js     |   6 +-
 .../src/main/resources/ui/app/models/table.js   |   9 +-
 .../main/resources/ui/app/models/worksheet.js   |   3 +-
 .../main/resources/ui/app/routes/application.js |   5 -
 .../routes/databases/database/tables/table.js   |   2 +
 .../databases/database/tables/table/auth.js     |   2 +-
 .../databases/database/tables/table/edit.js     |   2 +-
 .../main/resources/ui/app/routes/queries/new.js |   1 -
 .../resources/ui/app/routes/queries/query.js    | 126 +++-
 .../resources/ui/app/routes/savedqueries.js     |   3 -
 .../resources/ui/app/serializers/database.js    |  25 +
 .../main/resources/ui/app/serializers/table.js  |  22 +
 .../src/main/resources/ui/app/services/jobs.js  |   5 +
 .../resources/ui/app/services/keep-alive.js     |  31 -
 .../resources/ui/app/services/tez-view-info.js  |  83 +++
 .../src/main/resources/ui/app/styles/app.scss   |   9 +
 .../app/templates/components/visual-explain.hbs |   2 +-
 .../ui/app/templates/queries/query.hbs          |  17 +-
 .../ui/app/templates/queries/query/tez-ui.hbs   |   6 +-
 .../resources/ui/app/templates/savedqueries.hbs |   1 -
 .../app/utils/hive-explainer/renderer-force.js  |   2 +-
 .../ui/app/utils/hive-explainer/renderer.js     |   4 +-
 .../views/hive20/src/main/resources/view.xml    |   5 -
 .../apache/oozie/ambari/view/OozieDelegate.java |  12 +-
 .../ambari/view/OozieProxyImpersonator.java     | 381 +++++-------
 .../oozie/ambari/view/assets/AssetResource.java |  93 +--
 .../oozie/ambari/view/exception/ErrorCode.java  |  58 ++
 .../ambari/view/exception/WfmException.java     |  46 ++
 .../ambari/view/exception/WfmWebException.java  | 115 ++++
 .../WorkflowsManagerResource.java               |  36 +-
 .../ui/app/components/bundle-config.js          |   8 +-
 .../resources/ui/app/components/coord-config.js |   6 +-
 .../ui/app/components/designer-errors.js        |  49 ++
 .../ui/app/components/flow-designer.js          | 108 ++--
 .../main/resources/ui/app/components/save-wf.js |   2 +-
 .../ui/app/components/stack-trace-dialog.js     |  26 +
 .../ui/app/components/workflow-action-editor.js |  40 +-
 .../ui/app/domain/cytoscape-flow-renderer.js    |   2 +-
 .../resources/ui/app/domain/cytoscape-style.js  |   8 +
 .../main/resources/ui/app/domain/workflow.js    |   9 +-
 .../src/main/resources/ui/app/styles/app.less   |  18 +-
 .../app/templates/components/bundle-config.hbs  |   4 +-
 .../app/templates/components/coord-config.hbs   |   2 +-
 .../templates/components/designer-errors.hbs    |  17 +
 .../app/templates/components/flow-designer.hbs  |  20 +-
 .../ui/app/templates/components/save-wf.hbs     |   4 +-
 .../templates/components/stack-trace-dialog.hbs |  33 +
 .../components/workflow-action-editor.hbs       |   5 +-
 .../components/stack-trace-dialog-test.js       |  40 ++
 255 files changed, 5951 insertions(+), 4532 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/353a076c/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/353a076c/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
index 483362b,d9b9b57..d527290
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
@@@ -106,8 -110,8 +113,9 @@@ public class UpgradeCatalog300 extends 
     */
    @Override
    protected void executeDDLUpdates() throws AmbariException, SQLException {
 +    addServiceComponentColumn();
      updateStageTable();
+     updateClusterConfigurationTable();
    }
  
    protected void updateStageTable() throws SQLException {
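
For readers following the merge, here is a minimal sketch of what the new
updateClusterConfigurationTable() step could look like, based on the columns
asserted in UpgradeCatalog300Test further below. The DBColumnInfo constructor
arguments (length, default value, nullability) are assumptions for illustration
and are not taken from this diff:

  // Hypothetical sketch inside UpgradeCatalog300; dbAccessor is the injected DBAccessor.
  protected void updateClusterConfigurationTable() throws SQLException {
    // Add a "selected" flag and its timestamp to the clusterconfig table, matching the
    // columns captured in the test: CLUSTER_CONFIG_SELECTED_COLUMN (Short) and
    // CLUSTER_CONFIG_SELECTED_TIMESTAMP_COLUMN (Long). Defaults shown are assumed.
    dbAccessor.addColumn(CLUSTER_CONFIG_TABLE,
        new DBAccessor.DBColumnInfo(CLUSTER_CONFIG_SELECTED_COLUMN, Short.class, null, 0, false));
    dbAccessor.addColumn(CLUSTER_CONFIG_TABLE,
        new DBAccessor.DBColumnInfo(CLUSTER_CONFIG_SELECTED_TIMESTAMP_COLUMN, Long.class, null, 0, false));
  }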

http://git-wip-us.apache.org/repos/asf/ambari/blob/353a076c/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/353a076c/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/353a076c/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/353a076c/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/353a076c/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/353a076c/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/353a076c/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
index 3c933d9,a44c2b3..e0d07b4
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
@@@ -53,25 -122,86 +121,96 @@@ public class UpgradeCatalog300Test 
  
    @Test
    public void testExecuteDDLUpdates() throws Exception {
+     Module module = new Module() {
+       @Override
+       public void configure(Binder binder) {
+         binder.bind(DBAccessor.class).toInstance(dbAccessor);
+         binder.bind(OsFamily.class).toInstance(osFamily);
+         binder.bind(EntityManager.class).toInstance(entityManager);
+         binder.bind(Configuration.class).toInstance(configuration);
+       }
+     };
+ 
+     Capture<DBAccessor.DBColumnInfo> clusterConfigSelectedColumn = newCapture();
+     Capture<DBAccessor.DBColumnInfo> clusterConfigSelectedTimestampColumn = newCapture();
+     dbAccessor.addColumn(eq(UpgradeCatalog300.CLUSTER_CONFIG_TABLE), capture(clusterConfigSelectedColumn));
+     dbAccessor.addColumn(eq(UpgradeCatalog300.CLUSTER_CONFIG_TABLE), capture(clusterConfigSelectedTimestampColumn));
+ 
++    // component table
++    Capture<DBAccessor.DBColumnInfo> componentStateColumn = newCapture();
++    dbAccessor.addColumn(eq(UpgradeCatalog250.COMPONENT_TABLE), capture(componentStateColumn));
++
+     replay(dbAccessor, configuration);
+ 
+     Injector injector = Guice.createInjector(module);
+     UpgradeCatalog300 upgradeCatalog300 = injector.getInstance(UpgradeCatalog300.class);
+     upgradeCatalog300.executeDDLUpdates();
  
-     Method updateStageTable = UpgradeCatalog300.class.getDeclaredMethod("updateStageTable");
-     Method addServiceComponentColumn = UpgradeCatalog300.class
-         .getDeclaredMethod("addServiceComponentColumn");
+     DBAccessor.DBColumnInfo capturedSelectedColumn = clusterConfigSelectedColumn.getValue();
+     Assert.assertNotNull(capturedSelectedColumn);
+     Assert.assertEquals(UpgradeCatalog300.CLUSTER_CONFIG_SELECTED_COLUMN, capturedSelectedColumn.getName());
+     Assert.assertEquals(Short.class, capturedSelectedColumn.getType());
  
-     UpgradeCatalog300 upgradeCatalog300 = createMockBuilder(UpgradeCatalog300.class)
-         .addMockedMethod(addServiceComponentColumn)
-         .addMockedMethod(updateStageTable)
-         .createMock();
+     DBAccessor.DBColumnInfo capturedSelectedTimestampColumn = clusterConfigSelectedTimestampColumn.getValue();
+     Assert.assertNotNull(capturedSelectedTimestampColumn);
+     Assert.assertEquals(UpgradeCatalog300.CLUSTER_CONFIG_SELECTED_TIMESTAMP_COLUMN, capturedSelectedTimestampColumn.getName());
+     Assert.assertEquals(Long.class, capturedSelectedTimestampColumn.getType());
  
-     upgradeCatalog300.addServiceComponentColumn();
++    // component table
++    DBAccessor.DBColumnInfo capturedStateColumn = componentStateColumn.getValue();
++    Assert.assertNotNull(componentStateColumn);
++    Assert.assertEquals("repo_state", capturedStateColumn.getName());
++    Assert.assertEquals(String.class, capturedStateColumn.getType());
 +
-     upgradeCatalog300.updateStageTable();
+     verify(dbAccessor);
+   }
  
-     replay(upgradeCatalog300);
+   /**
+    * Tests pre-DML executions.
+    *
+    * @throws Exception
+    */
+   @Test
+   public void testExecutePreDMLUpdates() throws Exception {
+     Module module = new Module() {
+       @Override
+       public void configure(Binder binder) {
+         binder.bind(DBAccessor.class).toInstance(dbAccessor);
+         binder.bind(OsFamily.class).toInstance(osFamily);
+         binder.bind(EntityManager.class).toInstance(entityManager);
+         binder.bind(Configuration.class).toInstance(configuration);
+       }
+     };
  
-     upgradeCatalog300.executeDDLUpdates();
+     EntityManagerFactory emFactory = EasyMock.createNiceMock(EntityManagerFactory.class);
+     Cache emCache = EasyMock.createNiceMock(Cache.class);
  
-     verify(upgradeCatalog300);
-   }
+     expect(entityManager.getEntityManagerFactory()).andReturn(emFactory).atLeastOnce();
+     expect(emFactory.getCache()).andReturn(emCache).atLeastOnce();
+ 
+     EntityTransaction mockTransaction = EasyMock.createNiceMock(EntityTransaction.class);
+     Connection mockConnection = EasyMock.createNiceMock(Connection.class);
+     Statement mockStatement = EasyMock.createNiceMock(Statement.class);
  
+     expect(dbAccessor.getConnection()).andReturn(mockConnection).once();
+     expect(mockConnection.createStatement()).andReturn(mockStatement).once();
+ 
+     expect(mockStatement.executeQuery(EasyMock.anyString())).andReturn(
+         EasyMock.createNiceMock(ResultSet.class));
+ 
+     expect(entityManager.getTransaction()).andReturn(
+         mockTransaction).atLeastOnce();
+ 
+     dbAccessor.dropTable(UpgradeCatalog300.CLUSTER_CONFIG_MAPPING_TABLE);
+     EasyMock.expectLastCall().once();
+ 
+     replay(dbAccessor, entityManager, emFactory, emCache, mockConnection, mockTransaction,
+         mockStatement, configuration);
+ 
+     Injector injector = Guice.createInjector(module);
+     UpgradeCatalog300 upgradeCatalog300 = injector.getInstance(UpgradeCatalog300.class);
+     upgradeCatalog300.executePreDMLUpdates();
+ 
+     verify(dbAccessor, entityManager, emFactory, emCache);
+   }
  }


[04/50] ambari git commit: AMBARI-20103 : Reduce AMS HBase metrics through whitelist. (avijayan)

Posted by nc...@apache.org.
AMBARI-20103 : Reduce AMS HBase metrics through whitelist. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b986a3c2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b986a3c2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b986a3c2

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b986a3c2db753a006718592c7689590bc842c6c6
Parents: ef0ae8b
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Feb 21 16:17:13 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Feb 21 16:17:13 2017 -0800

----------------------------------------------------------------------
 ambari-metrics/ambari-metrics-assembly/pom.xml  |  12 ++
 .../src/main/assembly/collector-windows.xml     |   1 +
 .../src/main/assembly/collector.xml             |   1 +
 .../conf/unix/amshbase_metrics_whitelist        | 162 +++++++++++++++++++
 .../conf/windows/amshbase_metrics_whitelist     | 162 +++++++++++++++++++
 .../ambari-metrics-timelineservice/pom.xml      |   5 +
 .../timeline/HBaseTimelineMetricStore.java      |   2 +-
 .../timeline/TimelineMetricConfiguration.java   |  39 +++++
 .../metrics/timeline/TimelineMetricsFilter.java |  45 +++---
 .../timeline/TimelineMetricsFilterTest.java     |  59 ++++++-
 .../timeline/discovery/TestMetadataManager.java |  15 +-
 11 files changed, 475 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-assembly/pom.xml b/ambari-metrics/ambari-metrics-assembly/pom.xml
index cdcc2a7..a4b87de 100644
--- a/ambari-metrics/ambari-metrics-assembly/pom.xml
+++ b/ambari-metrics/ambari-metrics-assembly/pom.xml
@@ -338,6 +338,9 @@
                           <location>${collector.dir}/conf/unix/metrics_whitelist</location>
                         </source>
                         <source>
+                          <location>${collector.dir}/conf/unix/amshbase_metrics_whitelist</location>
+                        </source>
+                        <source>
                           <location>${collector.dir}/target/embedded/${hbase.folder}/conf/hbase-site.xml</location>
                         </source>
                       </sources>
@@ -882,6 +885,15 @@
                   </mapper>
                 </data>
                 <data>
+                  <src>${collector.dir}/conf/unix/amshbase_metrics_whitelist</src>
+                  <type>file</type>
+                  <mapper>
+                    <type>perm</type>
+                    <filemode>755</filemode>
+                    <prefix>/etc/ambari-metrics-collector/conf</prefix>
+                  </mapper>
+                </data>
+                <data>
                   <src>${collector.dir}/target/embedded/${hbase.folder}/conf/hbase-site.xml</src>
                   <type>file</type>
                   <mapper>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows.xml
index 1ea6c46..8b7a021 100644
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows.xml
+++ b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector-windows.xml
@@ -46,6 +46,7 @@
         <include>ams-site.xml</include>
         <include>log4j.properties</include>
         <include>metrics_whitelist</include>
+        <include>amshbase_metrics_whitelist</include>
       </includes>
     </fileSet>
     <fileSet>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector.xml b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector.xml
index 0997b48..7f338be 100644
--- a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector.xml
+++ b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/collector.xml
@@ -45,6 +45,7 @@
         <include>ams-site.xml</include>
         <include>log4j.properties</include>
         <include>metrics_whitelist</include>
+        <include>amshbase_metrics_whitelist</include>
       </includes>
     </fileSet>
     <fileSet>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-timelineservice/conf/unix/amshbase_metrics_whitelist
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/amshbase_metrics_whitelist b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/amshbase_metrics_whitelist
new file mode 100644
index 0000000..6cf4319
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/amshbase_metrics_whitelist
@@ -0,0 +1,162 @@
+jvm.Master.JvmMetrics.ThreadsBlocked
+jvm.Master.JvmMetrics.ThreadsNew
+jvm.Master.JvmMetrics.ThreadsRunnable
+jvm.Master.JvmMetrics.ThreadsTerminated
+jvm.Master.JvmMetrics.ThreadsTimedWaiting
+jvm.Master.JvmMetrics.ThreadsWaiting
+jvm.RegionServer.JvmMetrics.GcCount
+jvm.RegionServer.JvmMetrics.GcCountConcurrentMarkSweep
+jvm.RegionServer.JvmMetrics.GcCountParNew
+jvm.RegionServer.JvmMetrics.GcTimeMillis
+jvm.RegionServer.JvmMetrics.GcTimeMillisConcurrentMarkSweep
+jvm.RegionServer.JvmMetrics.GcTimeMillisParNew
+jvm.RegionServer.JvmMetrics.MemHeapCommittedM
+jvm.RegionServer.JvmMetrics.MemHeapMaxM
+jvm.RegionServer.JvmMetrics.MemHeapUsedM
+jvm.RegionServer.JvmMetrics.MemNonHeapCommittedM
+jvm.RegionServer.JvmMetrics.MemNonHeapMaxM
+jvm.RegionServer.JvmMetrics.MemNonHeapUsedM
+jvm.RegionServer.JvmMetrics.ThreadsBlocked
+jvm.RegionServer.JvmMetrics.ThreadsNew
+jvm.RegionServer.JvmMetrics.ThreadsRunnable
+jvm.RegionServer.JvmMetrics.ThreadsTerminated
+jvm.RegionServer.JvmMetrics.ThreadsTimedWaiting
+jvm.RegionServer.JvmMetrics.ThreadsWaiting
+master.AssignmentManger.ritCount
+master.AssignmentManger.ritCountOverThreshold
+master.AssignmentManger.ritOldestAge
+master.Master.TotalCallTime_num_ops
+master.Master.exceptions
+master.Server.averageLoad
+master.Server.numDeadRegionServers
+master.Server.numRegionServers
+regionserver.RegionServer.ProcessCallTime_75th_percentile
+regionserver.RegionServer.ProcessCallTime_95th_percentile
+regionserver.RegionServer.ProcessCallTime_99th_percentile
+regionserver.RegionServer.ProcessCallTime_max
+regionserver.RegionServer.ProcessCallTime_mean
+regionserver.RegionServer.ProcessCallTime_median
+regionserver.RegionServer.QueueCallTime_75th_percentile
+regionserver.RegionServer.QueueCallTime_95th_percentile
+regionserver.RegionServer.QueueCallTime_99th_percentile
+regionserver.RegionServer.QueueCallTime_max
+regionserver.RegionServer.QueueCallTime_mean
+regionserver.RegionServer.QueueCallTime_median
+regionserver.RegionServer.TotalCallTime_num_ops
+regionserver.RegionServer.authenticationFailures
+regionserver.RegionServer.authenticationSuccesses
+regionserver.RegionServer.authorizationFailures
+regionserver.RegionServer.authorizationSuccesses
+regionserver.RegionServer.exceptions
+regionserver.RegionServer.numActiveHandler
+regionserver.RegionServer.numCallsInGeneralQueue
+regionserver.RegionServer.numCallsInPriorityQueue
+regionserver.RegionServer.numCallsInReplicationQueue
+regionserver.RegionServer.numOpenConnections
+regionserver.RegionServer.queueSize
+regionserver.RegionServer.receivedBytes
+regionserver.RegionServer.sentBytes
+regionserver.Server.Append_75th_percentile
+regionserver.Server.Append_95th_percentile
+regionserver.Server.Append_99th_percentile
+regionserver.Server.Append_max
+regionserver.Server.Append_mean
+regionserver.Server.Append_median
+regionserver.Server.Append_min
+regionserver.Server.Append_num_ops
+regionserver.Server.Delete_75th_percentile
+regionserver.Server.Delete_95th_percentile
+regionserver.Server.Delete_99th_percentile
+regionserver.Server.Delete_max
+regionserver.Server.Delete_mean
+regionserver.Server.Delete_median
+regionserver.Server.Delete_min
+regionserver.Server.Delete_num_ops
+regionserver.Server.Get_75th_percentile
+regionserver.Server.Get_95th_percentile
+regionserver.Server.Get_99th_percentile
+regionserver.Server.Get_max
+regionserver.Server.Get_mean
+regionserver.Server.Get_median
+regionserver.Server.Get_min
+regionserver.Server.Get_num_ops
+regionserver.Server.Increment_75th_percentile
+regionserver.Server.Increment_95th_percentile
+regionserver.Server.Increment_99th_percentile
+regionserver.Server.Increment_max
+regionserver.Server.Increment_mean
+regionserver.Server.Increment_median
+regionserver.Server.Increment_min
+regionserver.Server.Increment_num_ops
+regionserver.Server.Mutate_75th_percentile
+regionserver.Server.Mutate_95th_percentile
+regionserver.Server.Mutate_99th_percentile
+regionserver.Server.Mutate_max
+regionserver.Server.Mutate_mean
+regionserver.Server.Mutate_median
+regionserver.Server.Mutate_min
+regionserver.Server.Mutate_num_ops
+regionserver.Server.Replay_75th_percentile
+regionserver.Server.Replay_95th_percentile
+regionserver.Server.Replay_99th_percentile
+regionserver.Server.Replay_max
+regionserver.Server.Replay_mean
+regionserver.Server.Replay_median
+regionserver.Server.Replay_min
+regionserver.Server.Replay_num_ops
+regionserver.Server.ScanNext_num_ops
+regionserver.Server.ScanTime_75th_percentile
+regionserver.Server.ScanTime_95th_percentile
+regionserver.Server.ScanTime_99th_percentile
+regionserver.Server.ScanTime_max
+regionserver.Server.ScanTime_mean
+regionserver.Server.ScanTime_median
+regionserver.Server.blockCacheCount
+regionserver.Server.blockCacheCountHitPercent
+regionserver.Server.blockCacheEvictionCount
+regionserver.Server.blockCacheExpressHitPercent
+regionserver.Server.blockCacheFreeSize
+regionserver.Server.blockCacheHitCount
+regionserver.Server.blockCacheMissCount
+regionserver.Server.blockCacheSize
+regionserver.Server.compactionQueueLength
+regionserver.Server.flushQueueLength
+regionserver.Server.hlogFileCount
+regionserver.Server.hlogFileSize
+regionserver.Server.memStoreSize
+regionserver.Server.percentFilesLocal
+regionserver.Server.readRequestCount
+regionserver.Server.regionCount
+regionserver.Server.slowDeleteCount
+regionserver.Server.slowGetCount
+regionserver.Server.slowPutCount
+regionserver.Server.splitQueueLength
+regionserver.Server.staticBloomSize
+regionserver.Server.staticIndexSize
+regionserver.Server.storeCount
+regionserver.Server.storeFileCount
+regionserver.Server.storeFileIndexSize
+regionserver.Server.storeFileSize
+regionserver.Server.totalRequestCount
+regionserver.Server.writeRequestCount
+regionserver.WAL.AppendSize_75th_percentile
+regionserver.WAL.AppendSize_95th_percentile
+regionserver.WAL.AppendSize_99th_percentile
+regionserver.WAL.AppendSize_max
+regionserver.WAL.AppendSize_mean
+regionserver.WAL.AppendSize_median
+regionserver.WAL.AppendTime_75th_percentile
+regionserver.WAL.AppendTime_95th_percentile
+regionserver.WAL.AppendTime_99th_percentile
+regionserver.WAL.AppendTime_max
+regionserver.WAL.AppendTime_mean
+regionserver.WAL.AppendTime_median
+regionserver.WAL.SyncTime_75th_percentile
+regionserver.WAL.SyncTime_95th_percentile
+regionserver.WAL.SyncTime_99th_percentile
+regionserver.WAL.SyncTime_max
+regionserver.WAL.SyncTime_mean
+regionserver.WAL.SyncTime_median
+regionserver.WAL.SyncTime_num_ops
+regionserver.WAL.appendCount
+regionserver.WAL.slowAppendCount
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-timelineservice/conf/windows/amshbase_metrics_whitelist
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/windows/amshbase_metrics_whitelist b/ambari-metrics/ambari-metrics-timelineservice/conf/windows/amshbase_metrics_whitelist
new file mode 100644
index 0000000..6cf4319
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/windows/amshbase_metrics_whitelist
@@ -0,0 +1,162 @@
+jvm.Master.JvmMetrics.ThreadsBlocked
+jvm.Master.JvmMetrics.ThreadsNew
+jvm.Master.JvmMetrics.ThreadsRunnable
+jvm.Master.JvmMetrics.ThreadsTerminated
+jvm.Master.JvmMetrics.ThreadsTimedWaiting
+jvm.Master.JvmMetrics.ThreadsWaiting
+jvm.RegionServer.JvmMetrics.GcCount
+jvm.RegionServer.JvmMetrics.GcCountConcurrentMarkSweep
+jvm.RegionServer.JvmMetrics.GcCountParNew
+jvm.RegionServer.JvmMetrics.GcTimeMillis
+jvm.RegionServer.JvmMetrics.GcTimeMillisConcurrentMarkSweep
+jvm.RegionServer.JvmMetrics.GcTimeMillisParNew
+jvm.RegionServer.JvmMetrics.MemHeapCommittedM
+jvm.RegionServer.JvmMetrics.MemHeapMaxM
+jvm.RegionServer.JvmMetrics.MemHeapUsedM
+jvm.RegionServer.JvmMetrics.MemNonHeapCommittedM
+jvm.RegionServer.JvmMetrics.MemNonHeapMaxM
+jvm.RegionServer.JvmMetrics.MemNonHeapUsedM
+jvm.RegionServer.JvmMetrics.ThreadsBlocked
+jvm.RegionServer.JvmMetrics.ThreadsNew
+jvm.RegionServer.JvmMetrics.ThreadsRunnable
+jvm.RegionServer.JvmMetrics.ThreadsTerminated
+jvm.RegionServer.JvmMetrics.ThreadsTimedWaiting
+jvm.RegionServer.JvmMetrics.ThreadsWaiting
+master.AssignmentManger.ritCount
+master.AssignmentManger.ritCountOverThreshold
+master.AssignmentManger.ritOldestAge
+master.Master.TotalCallTime_num_ops
+master.Master.exceptions
+master.Server.averageLoad
+master.Server.numDeadRegionServers
+master.Server.numRegionServers
+regionserver.RegionServer.ProcessCallTime_75th_percentile
+regionserver.RegionServer.ProcessCallTime_95th_percentile
+regionserver.RegionServer.ProcessCallTime_99th_percentile
+regionserver.RegionServer.ProcessCallTime_max
+regionserver.RegionServer.ProcessCallTime_mean
+regionserver.RegionServer.ProcessCallTime_median
+regionserver.RegionServer.QueueCallTime_75th_percentile
+regionserver.RegionServer.QueueCallTime_95th_percentile
+regionserver.RegionServer.QueueCallTime_99th_percentile
+regionserver.RegionServer.QueueCallTime_max
+regionserver.RegionServer.QueueCallTime_mean
+regionserver.RegionServer.QueueCallTime_median
+regionserver.RegionServer.TotalCallTime_num_ops
+regionserver.RegionServer.authenticationFailures
+regionserver.RegionServer.authenticationSuccesses
+regionserver.RegionServer.authorizationFailures
+regionserver.RegionServer.authorizationSuccesses
+regionserver.RegionServer.exceptions
+regionserver.RegionServer.numActiveHandler
+regionserver.RegionServer.numCallsInGeneralQueue
+regionserver.RegionServer.numCallsInPriorityQueue
+regionserver.RegionServer.numCallsInReplicationQueue
+regionserver.RegionServer.numOpenConnections
+regionserver.RegionServer.queueSize
+regionserver.RegionServer.receivedBytes
+regionserver.RegionServer.sentBytes
+regionserver.Server.Append_75th_percentile
+regionserver.Server.Append_95th_percentile
+regionserver.Server.Append_99th_percentile
+regionserver.Server.Append_max
+regionserver.Server.Append_mean
+regionserver.Server.Append_median
+regionserver.Server.Append_min
+regionserver.Server.Append_num_ops
+regionserver.Server.Delete_75th_percentile
+regionserver.Server.Delete_95th_percentile
+regionserver.Server.Delete_99th_percentile
+regionserver.Server.Delete_max
+regionserver.Server.Delete_mean
+regionserver.Server.Delete_median
+regionserver.Server.Delete_min
+regionserver.Server.Delete_num_ops
+regionserver.Server.Get_75th_percentile
+regionserver.Server.Get_95th_percentile
+regionserver.Server.Get_99th_percentile
+regionserver.Server.Get_max
+regionserver.Server.Get_mean
+regionserver.Server.Get_median
+regionserver.Server.Get_min
+regionserver.Server.Get_num_ops
+regionserver.Server.Increment_75th_percentile
+regionserver.Server.Increment_95th_percentile
+regionserver.Server.Increment_99th_percentile
+regionserver.Server.Increment_max
+regionserver.Server.Increment_mean
+regionserver.Server.Increment_median
+regionserver.Server.Increment_min
+regionserver.Server.Increment_num_ops
+regionserver.Server.Mutate_75th_percentile
+regionserver.Server.Mutate_95th_percentile
+regionserver.Server.Mutate_99th_percentile
+regionserver.Server.Mutate_max
+regionserver.Server.Mutate_mean
+regionserver.Server.Mutate_median
+regionserver.Server.Mutate_min
+regionserver.Server.Mutate_num_ops
+regionserver.Server.Replay_75th_percentile
+regionserver.Server.Replay_95th_percentile
+regionserver.Server.Replay_99th_percentile
+regionserver.Server.Replay_max
+regionserver.Server.Replay_mean
+regionserver.Server.Replay_median
+regionserver.Server.Replay_min
+regionserver.Server.Replay_num_ops
+regionserver.Server.ScanNext_num_ops
+regionserver.Server.ScanTime_75th_percentile
+regionserver.Server.ScanTime_95th_percentile
+regionserver.Server.ScanTime_99th_percentile
+regionserver.Server.ScanTime_max
+regionserver.Server.ScanTime_mean
+regionserver.Server.ScanTime_median
+regionserver.Server.blockCacheCount
+regionserver.Server.blockCacheCountHitPercent
+regionserver.Server.blockCacheEvictionCount
+regionserver.Server.blockCacheExpressHitPercent
+regionserver.Server.blockCacheFreeSize
+regionserver.Server.blockCacheHitCount
+regionserver.Server.blockCacheMissCount
+regionserver.Server.blockCacheSize
+regionserver.Server.compactionQueueLength
+regionserver.Server.flushQueueLength
+regionserver.Server.hlogFileCount
+regionserver.Server.hlogFileSize
+regionserver.Server.memStoreSize
+regionserver.Server.percentFilesLocal
+regionserver.Server.readRequestCount
+regionserver.Server.regionCount
+regionserver.Server.slowDeleteCount
+regionserver.Server.slowGetCount
+regionserver.Server.slowPutCount
+regionserver.Server.splitQueueLength
+regionserver.Server.staticBloomSize
+regionserver.Server.staticIndexSize
+regionserver.Server.storeCount
+regionserver.Server.storeFileCount
+regionserver.Server.storeFileIndexSize
+regionserver.Server.storeFileSize
+regionserver.Server.totalRequestCount
+regionserver.Server.writeRequestCount
+regionserver.WAL.AppendSize_75th_percentile
+regionserver.WAL.AppendSize_95th_percentile
+regionserver.WAL.AppendSize_99th_percentile
+regionserver.WAL.AppendSize_max
+regionserver.WAL.AppendSize_mean
+regionserver.WAL.AppendSize_median
+regionserver.WAL.AppendTime_75th_percentile
+regionserver.WAL.AppendTime_95th_percentile
+regionserver.WAL.AppendTime_99th_percentile
+regionserver.WAL.AppendTime_max
+regionserver.WAL.AppendTime_mean
+regionserver.WAL.AppendTime_median
+regionserver.WAL.SyncTime_75th_percentile
+regionserver.WAL.SyncTime_95th_percentile
+regionserver.WAL.SyncTime_99th_percentile
+regionserver.WAL.SyncTime_max
+regionserver.WAL.SyncTime_mean
+regionserver.WAL.SyncTime_median
+regionserver.WAL.SyncTime_num_ops
+regionserver.WAL.appendCount
+regionserver.WAL.slowAppendCount
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-timelineservice/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/pom.xml b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
index 9d269e7..d848eab 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/pom.xml
+++ b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
@@ -211,6 +211,9 @@
                   <location>conf/unix/metrics_whitelist</location>
                 </source>
                 <source>
+                  <location>conf/unix/amshbase_metrics_whitelist</location>
+                </source>
+                <source>
                   <location>target/embedded/${hbase.folder}/conf/hbase-site.xml</location>
                 </source>
               </sources>
@@ -252,7 +255,9 @@
         <configuration>
           <excludes>
             <exclude>conf/unix/metrics_whitelist</exclude>
+            <exclude>conf/unix/amshbase_metrics_whitelist</exclude>
             <exclude>conf/windows/metrics_whitelist</exclude>
+            <exclude>conf/windows/amshbase_metrics_whitelist</exclude>
           </excludes>
         </configuration>
         <executions>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
index ccce3ff..575b8ea 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
@@ -114,7 +114,7 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
       }
 
       //Initialize whitelisting & blacklisting if needed
-      TimelineMetricsFilter.initializeMetricFilter(metricsConf);
+      TimelineMetricsFilter.initializeMetricFilter(configuration);
 
       defaultTopNHostsLimit = Integer.parseInt(metricsConf.get(DEFAULT_TOPN_HOSTS_LIMIT, "20"));
       if (Boolean.parseBoolean(metricsConf.get(USE_GROUPBY_AGGREGATOR_QUERIES, "true"))) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
index 92d595e..b7b9968 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
@@ -17,17 +17,25 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.net.InetAddress;
 import java.net.MalformedURLException;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.UnknownHostException;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
 
 /**
  * Configuration class that reads properties from ams-site.xml. All values
@@ -265,6 +273,8 @@ public class TimelineMetricConfiguration {
 
   public static final String DEFAULT_INSTANCE_PORT = "12001";
 
+  public static final String AMSHBASE_METRICS_WHITESLIST_FILE = "amshbase_metrics_whitelist";
+
   private Configuration hbaseConf;
   private Configuration metricsConf;
   private Configuration amsEnvConf;
@@ -426,4 +436,33 @@ public class TimelineMetricConfiguration {
   public boolean isSecurityEnabled() {
     return hbaseConf.get("hbase.security.authentication", "").equals("kerberos");
   }
+
+  public Set<String> getAmshbaseWhitelist() {
+
+    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+    if (classLoader == null) {
+      classLoader = getClass().getClassLoader();
+    }
+
+    BufferedReader br = null;
+    String strLine;
+    Set<String> whitelist = new HashSet<>();
+
+    try(InputStream inputStream = classLoader.getResourceAsStream(AMSHBASE_METRICS_WHITESLIST_FILE)) {
+      br = new BufferedReader(new InputStreamReader(inputStream));
+
+      while ((strLine = br.readLine()) != null)   {
+        strLine = strLine.trim();
+        if (StringUtils.isEmpty(strLine)) {
+          continue;
+        }
+        whitelist.add(strLine);
+      }
+    } catch (IOException ioEx) {
+      LOG.error("Unable to parse ams-hbase metric whitelist file", ioEx);
+      return Collections.EMPTY_SET;
+    }
+
+    return whitelist;
+  }
 }
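
A brief usage sketch of the new hook, assuming a TimelineMetricConfiguration
instance named "configuration" is already at hand (as in HBaseTimelineMetricStore
above); the variable and logger names are illustrative only:

  // The whitelist file is resolved from the classpath as "amshbase_metrics_whitelist"
  // and returned as a set of metric names (empty set if it cannot be read).
  Set<String> amsHbaseWhitelist = configuration.getAmshbaseWhitelist();
  LOG.info("Loaded " + amsHbaseWhitelist.size() + " whitelisted ams-hbase metrics");

  // The store now hands the whole configuration object to the filter
  // instead of just the ams-site Configuration.
  TimelineMetricsFilter.initializeMetricFilter(configuration);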

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilter.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilter.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilter.java
index d05353d..0fe979e 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilter.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilter.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -27,6 +28,7 @@ import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import java.io.BufferedReader;
 import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -41,35 +43,44 @@ public class TimelineMetricsFilter {
 
   private static Set<String> whitelistedMetrics;
   private static Set<Pattern> whitelistedMetricPatterns;
-  private static boolean metricWhitelistingEnabled;
-  private static boolean appBlacklistingEnabled;
   private static Set<String> blacklistedApps;
   private static String patternPrefix = "._p_";
+  private static Set<String> amshbaseWhitelist;
 
   private static final Log LOG = LogFactory.getLog(TimelineMetricsFilter.class);
 
-  public static void initializeMetricFilter(Configuration metricsConf) {
+  public static void initializeMetricFilter(TimelineMetricConfiguration configuration) {
+
+    Configuration metricsConf = null;
+    try {
+      metricsConf = configuration.getMetricsConf();
+    } catch (Exception e) {
+      LOG.error("Error fetching metrics configuration for getting whitelisting information");
+      return;
+    }
 
     whitelistedMetrics = new HashSet<String>();
     whitelistedMetricPatterns = new HashSet<Pattern>();
     blacklistedApps = new HashSet<>();
-    metricWhitelistingEnabled = false;
-    appBlacklistingEnabled = false;
+    amshbaseWhitelist = new HashSet<>();
 
     String whitelistFile = metricsConf.get(TIMELINE_METRICS_WHITELIST_FILE, "");
     if (!StringUtils.isEmpty(whitelistFile)) {
-      metricWhitelistingEnabled = true;
       readMetricWhitelistFromFile(whitelistFile);
     }
 
     String appsBlacklist = metricsConf.get(TIMELINE_METRICS_APPS_BLACKLIST, "");
     if (!StringUtils.isEmpty(appsBlacklist)) {
-      appBlacklistingEnabled = true;
       for (String app : appsBlacklist.split(",")) {
         blacklistedApps.add(app);
       }
       LOG.info("Blacklisted apps : " + blacklistedApps.toString());
     }
+
+    amshbaseWhitelist = configuration.getAmshbaseWhitelist();
+    if (CollectionUtils.isNotEmpty(amshbaseWhitelist)) {
+      LOG.info("Whitelisting " + amshbaseWhitelist.size() + " ams-hbase metrics");
+    }
   }
 
   private static void readMetricWhitelistFromFile(String whitelistFile) {
@@ -108,16 +119,17 @@ public class TimelineMetricsFilter {
 
   public static boolean acceptMetric(TimelineMetric metric) {
 
-    if (!isEnabled()) {
-      return true;
-    }
-
     // App takes precedence.
-    if (blacklistedApps.contains(metric.getAppId())) {
+    if (CollectionUtils.isNotEmpty(blacklistedApps) && blacklistedApps.contains(metric.getAppId())) {
       return false;
     }
 
-    if (!metricWhitelistingEnabled) {
+    //Special Case appId = ams-hbase whitelisting.
+    if ("ams-hbase".equals(metric.getAppId()) && CollectionUtils.isNotEmpty(amshbaseWhitelist)) {
+      return amshbaseWhitelist.contains(metric.getMetricName());
+    }
+
+    if (CollectionUtils.isEmpty(whitelistedMetrics) && CollectionUtils.isEmpty(whitelistedMetricPatterns)) {
       return true;
     }
 
@@ -139,15 +151,8 @@ public class TimelineMetricsFilter {
   public static void addToWhitelist(String metricName) {
 
     if (StringUtils.isNotEmpty(metricName)) {
-      if (!metricWhitelistingEnabled)  {
-        metricWhitelistingEnabled = true;
-      }
-
       whitelistedMetrics.add(metricName);
     }
   }
 
-  public static boolean isEnabled() {
-    return appBlacklistingEnabled || metricWhitelistingEnabled;
-  }
 }
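
To make the new precedence in acceptMetric() concrete, a small example of the
expected behaviour once an ams-hbase whitelist is loaded; the metric names come
from the whitelist file added above, and the setup mirrors TimelineMetricsFilterTest
below:

  TimelineMetric metric = new TimelineMetric();

  // 1. Blacklisted apps are rejected first, regardless of metric name.
  // 2. For appId "ams-hbase", only metrics present in the ams-hbase whitelist pass.
  metric.setAppId("ams-hbase");
  metric.setMetricName("regionserver.Server.Delete_max");      // whitelisted -> accepted
  boolean accepted = TimelineMetricsFilter.acceptMetric(metric);

  metric.setMetricName("regionserver.Server.Delete_min3333");  // not whitelisted -> rejected
  boolean rejected = !TimelineMetricsFilter.acceptMetric(metric);

  // 3. Other apps fall through to the general metric whitelist / pattern checks,
  //    which accept everything when no whitelist file is configured.
  metric.setAppId("hbase");
  metric.setMetricName("jvm.JvmMetrics.MemHeapUsedM");         // accepted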

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilterTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilterTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilterTest.java
index 049d473..73c66fe 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilterTest.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricsFilterTest.java
@@ -17,20 +17,31 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+
 import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.easymock.EasyMock;
 import org.junit.Test;
+
 import java.net.URL;
+import java.util.HashSet;
+import java.util.Set;
 
 public class TimelineMetricsFilterTest {
 
   @Test
-  public void testAppBlacklisting() {
+  public void testAppBlacklisting() throws Exception{
 
     Configuration metricsConf = new Configuration();
     metricsConf.set("timeline.metrics.apps.blacklist", "hbase,datanode,nimbus");
-    TimelineMetricsFilter.initializeMetricFilter(metricsConf);
+    TimelineMetricConfiguration configuration = EasyMock.createNiceMock(TimelineMetricConfiguration.class);
+    expect(configuration.getMetricsConf()).andReturn(metricsConf).once();
+    replay(configuration);
+
+    TimelineMetricsFilter.initializeMetricFilter(configuration);
 
     TimelineMetric timelineMetric = new TimelineMetric();
 
@@ -48,10 +59,14 @@ public class TimelineMetricsFilterTest {
   public void testMetricWhitelisting() throws Exception {
 
     Configuration metricsConf = new Configuration();
+    TimelineMetricConfiguration configuration = EasyMock.createNiceMock(TimelineMetricConfiguration.class);
+    expect(configuration.getMetricsConf()).andReturn(metricsConf).once();
+    replay(configuration);
+
     URL fileUrl = ClassLoader.getSystemResource("test_data/metric_whitelist.dat");
 
     metricsConf.set("timeline.metrics.whitelist.file", fileUrl.getPath());
-    TimelineMetricsFilter.initializeMetricFilter(metricsConf);
+    TimelineMetricsFilter.initializeMetricFilter(configuration);
 
     TimelineMetric timelineMetric = new TimelineMetric();
 
@@ -73,11 +88,14 @@ public class TimelineMetricsFilterTest {
 
     Configuration metricsConf = new Configuration();
     metricsConf.set("timeline.metrics.apps.blacklist", "hbase,datanode,nimbus");
+    TimelineMetricConfiguration configuration = EasyMock.createNiceMock(TimelineMetricConfiguration.class);
+    expect(configuration.getMetricsConf()).andReturn(metricsConf).once();
+    replay(configuration);
 
     URL fileUrl = ClassLoader.getSystemResource("test_data/metric_whitelist.dat");
     metricsConf.set("timeline.metrics.whitelist.file", fileUrl.getPath());
 
-    TimelineMetricsFilter.initializeMetricFilter(metricsConf);
+    TimelineMetricsFilter.initializeMetricFilter(configuration);
 
     TimelineMetric timelineMetric = new TimelineMetric();
 
@@ -96,4 +114,37 @@ public class TimelineMetricsFilterTest {
     Assert.assertTrue(TimelineMetricsFilter.acceptMetric(timelineMetric));
   }
 
+  @Test
+  public void testAmshbaseWhitelisting() throws Exception {
+
+    TimelineMetricConfiguration configuration = EasyMock.createNiceMock(TimelineMetricConfiguration.class);
+
+    Configuration metricsConf = new Configuration();
+    expect(configuration.getMetricsConf()).andReturn(metricsConf).once();
+
+    Set<String> whitelist = new HashSet();
+    whitelist.add("regionserver.Server.Delete_99th_percentile");
+    whitelist.add("regionserver.Server.Delete_max");
+    whitelist.add("regionserver.Server.Delete_mean");
+    expect(configuration.getAmshbaseWhitelist()).andReturn(whitelist).once();
+
+    replay(configuration);
+
+    TimelineMetricsFilter.initializeMetricFilter(configuration);
+
+    TimelineMetric timelineMetric = new TimelineMetric();
+
+    timelineMetric.setMetricName("regionserver.Server.Delete_max");
+    timelineMetric.setAppId("ams-hbase");
+    Assert.assertTrue(TimelineMetricsFilter.acceptMetric(timelineMetric));
+
+    timelineMetric.setMetricName("regionserver.Server.Delete_min3333");
+    timelineMetric.setAppId("ams-hbase");
+    Assert.assertFalse(TimelineMetricsFilter.acceptMetric(timelineMetric));
+
+    timelineMetric.setMetricName("jvm.JvmMetrics.MemHeapUsedM");
+    timelineMetric.setAppId("hbase");
+    Assert.assertTrue(TimelineMetricsFilter.acceptMetric(timelineMetric));
+  }
+
 }
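
For reference, the mocking pattern the updated tests share, pulled together as one self-contained sketch. JUnit 4 and EasyMock on the test classpath are assumed; the class, method and metric names are the ones visible in the diff above, and the assertions mirror testAmshbaseWhitelisting. This is an illustrative sketch, not part of the patch.

import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricsFilter;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Test;

public class TimelineMetricsFilterMockingSketch {

  @Test
  public void amsHbaseWhitelistControlsAcceptance() throws Exception {
    // The filter is now initialized from a mocked TimelineMetricConfiguration
    // instead of a raw Hadoop Configuration.
    TimelineMetricConfiguration configuration =
        EasyMock.createNiceMock(TimelineMetricConfiguration.class);
    expect(configuration.getMetricsConf()).andReturn(new Configuration()).once();

    // Whitelist a single ams-hbase metric.
    Set<String> whitelist = new HashSet<>();
    whitelist.add("regionserver.Server.Delete_max");
    expect(configuration.getAmshbaseWhitelist()).andReturn(whitelist).once();
    replay(configuration);

    TimelineMetricsFilter.initializeMetricFilter(configuration);

    TimelineMetric metric = new TimelineMetric();
    metric.setAppId("ams-hbase");

    metric.setMetricName("regionserver.Server.Delete_max");
    Assert.assertTrue(TimelineMetricsFilter.acceptMetric(metric));   // whitelisted

    metric.setMetricName("regionserver.Server.Delete_min3333");
    Assert.assertFalse(TimelineMetricsFilter.acceptMetric(metric));  // not whitelisted
  }
}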

http://git-wip-us.apache.org/repos/asf/ambari/blob/b986a3c2/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
index c4cf51d..b243e0b 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
@@ -17,18 +17,23 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
 
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+
 import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.AbstractMiniHBaseClusterTest;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricsFilter;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.AggregatorUtils;
+import org.easymock.EasyMock;
 import org.junit.Before;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.net.URISyntaxException;
 import java.sql.SQLException;
 import java.util.Map;
 import java.util.Set;
@@ -38,7 +43,7 @@ public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
   TimelineMetricMetadataManager metadataManager;
 
   @Before
-  public void insertDummyRecords() throws IOException, SQLException {
+  public void insertDummyRecords() throws IOException, SQLException, URISyntaxException {
     // Initialize new manager
     metadataManager = new TimelineMetricMetadataManager(hdb, new Configuration());
     final long now = System.currentTimeMillis();
@@ -87,7 +92,11 @@ public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
     }});
     timelineMetrics.getMetrics().add(metric3);
 
-    TimelineMetricsFilter.initializeMetricFilter(new Configuration());
+    Configuration metricsConf = new Configuration();
+    TimelineMetricConfiguration configuration = EasyMock.createNiceMock(TimelineMetricConfiguration.class);
+    expect(configuration.getMetricsConf()).andReturn(metricsConf).once();
+    replay(configuration);
+    TimelineMetricsFilter.initializeMetricFilter(configuration);
     TimelineMetricsFilter.addToWhitelist("dummy_metric1");
     TimelineMetricsFilter.addToWhitelist("dummy_metric2");
 


[41/50] ambari git commit: AMBARI-20050. Issue while importing workflow with insufficient permissions. (Madhan Mohan Reddy via gauravn7)

Posted by nc...@apache.org.
AMBARI-20050. Issue while importing workflow with insufficient permissions. (Madhan Mohan Reddy via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/890ad905
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/890ad905
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/890ad905

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 890ad905df0ae35c4f3d7ea6c4335fefdb05190b
Parents: e1cb3b1
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 23 19:14:35 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 23 19:14:35 2017 +0530

----------------------------------------------------------------------
 .../apache/oozie/ambari/view/OozieDelegate.java |  12 +-
 .../ambari/view/OozieProxyImpersonator.java     | 381 +++++++------------
 .../oozie/ambari/view/assets/AssetResource.java |  93 +++--
 .../WorkflowsManagerResource.java               |  36 +-
 .../ui/app/components/bundle-config.js          |   8 +-
 .../resources/ui/app/components/coord-config.js |   6 +-
 .../ui/app/components/designer-errors.js        |  49 +++
 .../ui/app/components/flow-designer.js          |  69 +---
 .../src/main/resources/ui/app/styles/app.less   |  13 +-
 .../app/templates/components/bundle-config.hbs  |   4 +-
 .../app/templates/components/coord-config.hbs   |   2 +-
 .../templates/components/designer-errors.hbs    |  17 +
 .../app/templates/components/flow-designer.hbs  |  18 +-
 13 files changed, 330 insertions(+), 378 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
index 55c4312..6f3c4d2 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieDelegate.java
@@ -32,6 +32,8 @@ import javax.ws.rs.core.Response;
 
 import org.apache.ambari.view.ViewContext;
 import org.apache.commons.io.IOUtils;
+import org.apache.oozie.ambari.view.exception.ErrorCode;
+import org.apache.oozie.ambari.view.exception.WfmException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -90,10 +92,16 @@ public class OozieDelegate {
 
     LOGGER.info("Resp from oozie status entity=="
       + serviceResponse.getEntity());
+    String oozieResp=null;
     if (serviceResponse.getEntity() instanceof String) {
-      return (String) serviceResponse.getEntity();
+      oozieResp= (String) serviceResponse.getEntity();
     } else {
-      return "success";
+      oozieResp= serviceResponse.getEntity().toString();
+    }
+    if (oozieResp != null && oozieResp.trim().startsWith("{")) {
+      return  oozieResp;
+    }else{
+      throw new WfmException(oozieResp,ErrorCode.OOZIE_SUBMIT_ERROR);
     }
   }
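
With this change a non-JSON reply from Oozie no longer comes back as a bare string: OozieDelegate raises a WfmException tagged ErrorCode.OOZIE_SUBMIT_ERROR. Below is a hedged sketch of the caller side; the submitWorkflowJobToOozie signature and the WfmWebException constructors are taken from the other files in this commit, the wildcard import mirrors AssetResource, and the class itself is illustrative only.

import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;

import org.apache.oozie.ambari.view.*;   // OozieDelegate, JobType
import org.apache.oozie.ambari.view.exception.WfmException;
import org.apache.oozie.ambari.view.exception.WfmWebException;

public class SubmitCallerSketch {

  private final OozieDelegate oozieDelegate;

  public SubmitCallerSketch(OozieDelegate oozieDelegate) {
    this.oozieDelegate = oozieDelegate;
  }

  public Response submit(HttpHeaders headers, String appPath,
                         MultivaluedMap<String, String> queryParams) {
    try {
      // Success: the raw JSON reply from Oozie is passed straight through.
      String response = oozieDelegate.submitWorkflowJobToOozie(
          headers, appPath, queryParams, JobType.WORKFLOW);
      return Response.status(Response.Status.OK).entity(response).build();
    } catch (WfmException ex) {
      // Submit failure: keep the specific error code for the client.
      throw new WfmWebException(ex, ex.getErrorCode());
    } catch (Exception ex) {
      throw new WfmWebException(ex);
    }
  }
}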
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
index 6603a9c..c4e5bbd 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
@@ -49,9 +49,10 @@ import javax.ws.rs.core.UriInfo;
 import org.apache.ambari.view.ViewContext;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.oozie.ambari.view.assets.AssetResource;
+import org.apache.oozie.ambari.view.exception.ErrorCode;
+import org.apache.oozie.ambari.view.exception.WfmException;
+import org.apache.oozie.ambari.view.exception.WfmWebException;
 import org.apache.oozie.ambari.view.workflowmanager.WorkflowManagerService;
 import org.apache.oozie.ambari.view.workflowmanager.WorkflowsManagerResource;
 import org.slf4j.Logger;
@@ -83,30 +84,7 @@ public class OozieProxyImpersonator {
   private final OozieUtils oozieUtils = new OozieUtils();
   private final AssetResource assetResource;
 
-  private enum ErrorCodes {
-    OOZIE_SUBMIT_ERROR("error.oozie.submit", "Oozie Submit error"), OOZIE_IO_ERROR(
-      "error.oozie.io", "Oozie I/O error"), FILE_ACCESS_ACL_ERROR(
-      "error.file.access.control",
-      "Access Error to file due to access control"), FILE_ACCESS_UNKNOWN_ERROR(
-      "error.file.access", "Error accessing file"), WORKFLOW_PATH_EXISTS(
-      "error.workflow.path.exists", "Workflow Path exists"), WORKFLOW_XML_DOES_NOT_EXIST(
-      "error.workflow.xml.not.exists", "Workflow Xml does not exist");
-    private String errorCode;
-    private String description;
-
-    ErrorCodes(String errorCode, String description) {
-      this.errorCode = errorCode;
-      this.description = description;
-    }
-
-    public String getErrorCode() {
-      return errorCode;
-    }
 
-    public String getDescription() {
-      return description;
-    }
-  }
   private static enum WorkflowFormat{
     XML("xml"),
     DRAFT("draft");
@@ -139,15 +117,23 @@ public class OozieProxyImpersonator {
   @GET
   @Path("hdfsCheck")
   public Response hdfsCheck(){
-    hdfsFileUtils.hdfsCheck();
-    return Response.ok().build();
+    try {
+      hdfsFileUtils.hdfsCheck();
+      return Response.ok().build();
+    }catch (Exception e){
+      throw new WfmWebException(e);
+    }
   }
 
   @GET
   @Path("homeDirCheck")
   public Response homeDirCheck(){
-    hdfsFileUtils.homeDirCheck();
-    return Response.ok().build();
+    try{
+      hdfsFileUtils.homeDirCheck();
+      return Response.ok().build();
+    }catch (Exception e){
+      throw new WfmWebException(e);
+    }
   }
 
   @Path("/fileServices")
@@ -189,57 +175,89 @@ public class OozieProxyImpersonator {
                             @QueryParam("projectId") String projectId,
                             @DefaultValue("false") @QueryParam("overwrite") Boolean overwrite,
                             @QueryParam("description") String description,
-                            @QueryParam("jobType") String jobType) {
+                            @QueryParam("jobType") String jobTypeString) {
     LOGGER.info("submit workflow job called");
-    return submitJobInternal(postBody, headers, ui, appPath, overwrite,
-      JobType.valueOf(jobType), projectId, description);
+    JobType jobType = JobType.valueOf(jobTypeString);
+    if (StringUtils.isEmpty(appPath)) {
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
+    }
+    appPath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
+    try {
+      if (!overwrite) {
+        boolean fileExists = hdfsFileUtils.fileExists(appPath);
+        if (fileExists) {
+          throw new WfmWebException(ErrorCode.WORKFLOW_PATH_EXISTS);
+        }
+      }
+      postBody = utils.formatXml(postBody);
+
+      String filePath = workflowFilesService.createFile(appPath, postBody, overwrite);
+      LOGGER.info(String.format("submit workflow job done. filePath=[%s]", filePath));
+
+      if (PROJ_MANAGER_ENABLED) {
+        String name = oozieUtils.deduceWorkflowNameFromXml(postBody);
+        workflowManagerService.saveWorkflow(projectId, appPath, jobType,
+          null, viewContext.getUsername(), name);
+      }
+      String response = oozieDelegate.submitWorkflowJobToOozie(headers,
+        appPath, ui.getQueryParameters(), jobType);
+      return Response.status(Status.OK).entity(response).build();
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch(WfmException ex){
+      throw new WfmWebException(ex,ex.getErrorCode());
+    } catch(Exception ex) {
+      throw new WfmWebException(ex);
+    }
   }
 
   @POST
   @Path("/saveWorkflow")
   @Consumes({MediaType.TEXT_PLAIN + "," + MediaType.TEXT_XML})
   public Response saveWorkflow(String postBody, @Context HttpHeaders headers,
-                               @Context UriInfo ui, @QueryParam("app.path") String appPath, @QueryParam("jobType") String jobTypeStr,
+                               @Context UriInfo ui, @QueryParam("app.path") String appPath,
+                               @QueryParam("jobType") String jobTypeStr,
                                @DefaultValue("false") @QueryParam("overwrite") Boolean overwrite) {
     LOGGER.info("save workflow  called");
     if (StringUtils.isEmpty(appPath)) {
-      throw new RuntimeException("app path can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
     JobType jobType = StringUtils.isEmpty(jobTypeStr) ? JobType.WORKFLOW : JobType.valueOf(jobTypeStr);
     String workflowFilePath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
-    if (!overwrite) {
-      boolean fileExists = hdfsFileUtils.fileExists(workflowFilePath);
-      if (fileExists) {
-        return getFileExistsResponse();
-      }
-    }
-
     try {
+      if (!overwrite) {
+        boolean fileExists = hdfsFileUtils.fileExists(workflowFilePath);
+        if (fileExists) {
+          throw new WfmWebException(ErrorCode.WORKFLOW_PATH_EXISTS);
+        }
+      }
       if (utils.isXml(postBody)) {
         saveWorkflowXml(jobType, appPath, postBody, overwrite);
       } else {
         saveDraft(jobType, appPath, postBody, overwrite);
       }
       if (PROJ_MANAGER_ENABLED) {
-        workflowManagerService.saveWorkflow(null, workflowFilePath,
-          jobType, null,
+        workflowManagerService.saveWorkflow(null, workflowFilePath, jobType, null,
           viewContext.getUsername(), getWorkflowName(postBody));
       }
-    } catch (IOException ex) {
-      return getRespCodeForException(ex);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+       throw new WfmWebException(ex);
     }
-
     return Response.ok().build();
   }
-  private String getWorkflowName(String postBody){
+
+  private String getWorkflowName(String postBody) {
     if (utils.isXml(postBody)) {
       return oozieUtils.deduceWorkflowNameFromXml(postBody);
-    }else{
+    } else {
       return oozieUtils.deduceWorkflowNameFromJson(postBody);
     }
   }
 
-  private void saveWorkflowXml(JobType jobType, String appPath, String postBody, Boolean overwrite) throws IOException {
+  private void saveWorkflowXml(JobType jobType, String appPath, String postBody,
+                               Boolean overwrite) throws IOException {
     appPath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
     postBody = utils.formatXml(postBody);
     workflowFilesService.createFile(appPath, postBody, overwrite);
@@ -267,49 +285,47 @@ public class OozieProxyImpersonator {
                                @DefaultValue("false") @QueryParam("overwrite") Boolean overwrite) {
     LOGGER.info("publish asset called");
     if (StringUtils.isEmpty(uploadPath)) {
-      throw new RuntimeException("upload path can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
     uploadPath = uploadPath.trim();
-    Map<String, String> validateAsset = assetResource.validateAsset(headers, postBody,
-      ui.getQueryParameters());
-    if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
-      return Response.status(Status.BAD_REQUEST).entity(
-        validateAsset.get(MESSAGE_KEY)).build();
+    try {
+      Map<String, String> validateAsset = assetResource.validateAsset(headers, postBody,
+        ui.getQueryParameters());
+      if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
+        WfmWebException wfmEx=new WfmWebException(ErrorCode.INVALID_ASSET_INPUT);
+        wfmEx.setAdditionalDetail(validateAsset.get(MESSAGE_KEY));
+        throw wfmEx;
+      }
+      return saveAsset(postBody, uploadPath, overwrite);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
-    return saveAsset(postBody, uploadPath, overwrite);
   }
 
-  private Response saveAsset(String postBody, String uploadPath,
-                             Boolean overwrite) {
+  private Response saveAsset(String postBody, String uploadPath, Boolean overwrite) throws IOException {
     uploadPath = workflowFilesService.getAssetFileName(uploadPath);
     if (!overwrite) {
       boolean fileExists = hdfsFileUtils.fileExists(uploadPath);
       if (fileExists) {
-        return getFileExistsResponse();
+        throw new WfmWebException(ErrorCode.WORKFLOW_PATH_EXISTS);
       }
     }
     postBody = utils.formatXml(postBody);
-    try {
-      String filePath = workflowFilesService.createAssetFile(uploadPath,
-        postBody, overwrite);
-      LOGGER.info(String.format("publish asset job done. filePath=[%s]",
-        filePath));
-      return Response.ok().build();
-    } catch (Exception ex) {
-      LOGGER.error(ex.getMessage(), ex);
-      return getRespCodeForException(ex);
-    }
+    String filePath = workflowFilesService.createAssetFile(uploadPath, postBody, overwrite);
+    LOGGER.info(String.format("publish asset job done. filePath=[%s]", filePath));
+    return Response.ok().build();
   }
+
   @GET
   @Path("/readAsset")
-  public Response readAsset(
-          @QueryParam("assetPath") String assetPath) {
+  public Response readAsset(@QueryParam("assetPath") String assetPath) {
     if (StringUtils.isEmpty(assetPath)) {
-      throw new RuntimeException("assetPath can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
     try {
-      final InputStream is = workflowFilesService
-              .readAssset(assetPath);
+      final InputStream is = workflowFilesService.readAssset(assetPath);
       StreamingOutput streamer = new StreamingOutput() {
         @Override
         public void write(OutputStream os) throws IOException,
@@ -320,17 +336,16 @@ public class OozieProxyImpersonator {
         }
       };
       return Response.ok(streamer).status(200).build();
-    } catch (IOException e) {
-      return getRespCodeForException(e);
+    } catch (IOException ex) {
+      throw new WfmWebException(ex);
     }
   }
 
-
   @GET
   @Path("/readWorkflowDraft")
   public Response readDraft(@QueryParam("workflowXmlPath") String workflowPath) {
     if (StringUtils.isEmpty(workflowPath)) {
-      throw new RuntimeException("workflowXmlPath can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
     try {
       final InputStream is = workflowFilesService.readDraft(workflowPath);
@@ -344,113 +359,37 @@ public class OozieProxyImpersonator {
         }
       };
       return Response.ok(streamer).status(200).build();
-    } catch (IOException e) {
-      return getRespCodeForException(e);
+    } catch (IOException ex) {
+      throw new WfmWebException(ex);
     }
   }
 
   @POST
   @Path("/discardWorkflowDraft")
   public Response discardDraft(
-    @QueryParam("workflowXmlPath") String workflowPath)
-    throws IOException {
-    workflowFilesService.discardDraft(workflowPath);
-    return Response.ok().build();
-  }
-
-  private Response submitJobInternal(String postBody, HttpHeaders headers,
-                                     UriInfo ui, String appPath, Boolean overwrite, JobType jobType,
-                                     String projectId, String description) {
-    if (StringUtils.isEmpty(appPath)) {
-      throw new RuntimeException("app path can't be empty.");
-    }
-    appPath = workflowFilesService.getWorkflowFileName(appPath.trim(), jobType);
-    if (!overwrite) {
-      boolean fileExists = hdfsFileUtils.fileExists(appPath);
-      if (fileExists) {
-        return getFileExistsResponse();
-      }
-    }
-    postBody = utils.formatXml(postBody);
+    @QueryParam("workflowXmlPath") String workflowPath) {
     try {
-      String filePath = workflowFilesService.createFile(appPath, postBody,
-        overwrite);
-      LOGGER.info(String.format(
-        "submit workflow job done. filePath=[%s]", filePath));
-    } catch (Exception ex) {
-      LOGGER.error(ex.getMessage(), ex);
-      return getRespCodeForException(ex);
-
-    }
-    if (PROJ_MANAGER_ENABLED) {
-      String name = oozieUtils.deduceWorkflowNameFromXml(postBody);
-      workflowManagerService.saveWorkflow(projectId, appPath, jobType,
-        "todo description", viewContext.getUsername(), name);
-    }
-
-    String response = oozieDelegate.submitWorkflowJobToOozie(headers,
-      appPath, ui.getQueryParameters(), jobType);
-    if (response != null && response.trim().startsWith("{")) {
-      // dealing with oozie giving error but with 200 response.
-      return Response.status(Response.Status.OK).entity(response).build();
-    } else {
-      HashMap<String, String> resp = new HashMap<String, String>();
-      resp.put("status", ErrorCodes.OOZIE_SUBMIT_ERROR.getErrorCode());
-      resp.put("message", response);
-      return Response.status(Response.Status.BAD_REQUEST).entity(resp)
-        .build();
+      workflowFilesService.discardDraft(workflowPath);
+      return Response.ok().build();
+    } catch (IOException ex) {
+      throw new WfmWebException(ex);
     }
-
-  }
-
-  private Response getRespCodeForException(Exception ex) {
-    if (ex instanceof AccessControlException) {
-      HashMap<String, String> errorDetails = getErrorDetails(
-        ErrorCodes.FILE_ACCESS_ACL_ERROR.getErrorCode(),
-        ErrorCodes.FILE_ACCESS_ACL_ERROR.getDescription(), ex);
-      return Response.status(Response.Status.BAD_REQUEST)
-        .entity(errorDetails).build();
-    } else if (ex instanceof IOException) {
-      HashMap<String, String> errorDetails = getErrorDetails(
-        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getErrorCode(),
-        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getDescription(), ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(errorDetails).build();
-    } else {
-      HashMap<String, String> errorDetails = getErrorDetails(
-        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getErrorCode(),
-        ErrorCodes.FILE_ACCESS_UNKNOWN_ERROR.getDescription(), ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(errorDetails).build();
-    }
-
-  }
-
-  private Response getFileExistsResponse() {
-    HashMap<String, String> resp = new HashMap<String, String>();
-    resp.put("status", ErrorCodes.WORKFLOW_PATH_EXISTS.getErrorCode());
-    resp.put("message", ErrorCodes.WORKFLOW_PATH_EXISTS.getDescription());
-    return Response.status(Response.Status.BAD_REQUEST).entity(resp)
-      .build();
   }
 
   @GET
   @Path("/readWorkflow")
   public Response readWorkflow(
     @QueryParam("workflowPath") String workflowPath, @QueryParam("jobType") String jobTypeStr) {
-    String workflowFileName=workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
-    if (!hdfsFileUtils.fileExists(workflowFileName)){
-      HashMap<String,String> response=new HashMap<>();
-      response.put("status", ErrorCodes.WORKFLOW_XML_DOES_NOT_EXIST.getErrorCode());
-      response.put("message", ErrorCodes.WORKFLOW_XML_DOES_NOT_EXIST.getDescription());
-      return Response.status(Status.BAD_REQUEST).entity(response).build();
-    }
+    try {
+      String workflowFileName = workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
+      if (!hdfsFileUtils.fileExists(workflowFileName)) {
+        throw new WfmWebException(ErrorCode.WORKFLOW_XML_DOES_NOT_EXIST);
+      }
+      WorkflowFileInfo workflowDetails = workflowFilesService
+        .getWorkflowDetails(workflowPath, JobType.valueOf(jobTypeStr));
+      if (workflowPath.endsWith(Constants.WF_DRAFT_EXTENSION) || workflowDetails.getIsDraftCurrent()) {
+        String filePath = workflowFilesService.getWorkflowDraftFileName(workflowPath, JobType.valueOf(jobTypeStr));
 
-    WorkflowFileInfo workflowDetails = workflowFilesService
-      .getWorkflowDetails(workflowPath, JobType.valueOf(jobTypeStr));
-    if (workflowPath.endsWith(Constants.WF_DRAFT_EXTENSION) || workflowDetails.getIsDraftCurrent()) {
-      String filePath = workflowFilesService.getWorkflowDraftFileName(workflowPath, JobType.valueOf(jobTypeStr));
-      try {
         InputStream inputStream = workflowFilesService.readWorkflowXml(filePath);
         String stringResponse = IOUtils.toString(inputStream);
         if (!workflowFilesService.isDraftFormatCurrent(stringResponse)) {
@@ -459,36 +398,35 @@ public class OozieProxyImpersonator {
         } else {
           return Response.ok(stringResponse).header(RESPONSE_TYPE, WorkflowFormat.DRAFT.getValue()).build();
         }
-      } catch (IOException e) {
-        return getRespCodeForException(e);
+      } else {
+        String filePath = workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
+        return getWorkflowResponse(filePath, WorkflowFormat.XML.getValue(), false);
       }
-    } else {
-      String filePath = workflowFilesService.getWorkflowFileName(workflowPath, JobType.valueOf(jobTypeStr));
-      return getWorkflowResponse(filePath, WorkflowFormat.XML.getValue(), false);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
   }
 
-  private Response getWorkflowResponse(String filePath, String responseType, boolean olderFormatDraftIngored) {
-    try {
-      final InputStream is = workflowFilesService
-        .readWorkflowXml(filePath);
-      StreamingOutput streamer = new StreamingOutput() {
-        @Override
-        public void write(OutputStream os) throws IOException,
-          WebApplicationException {
-          IOUtils.copy(is, os);
-          is.close();
-          os.close();
-        }
-      };
-      Response.ResponseBuilder responseBuilder = Response.ok(streamer).header(RESPONSE_TYPE, responseType);
-      if(olderFormatDraftIngored){
-        responseBuilder.header(OLDER_FORMAT_DRAFT_INGORED,Boolean.TRUE.toString());
+  private Response getWorkflowResponse(String filePath, String responseType,
+                                       boolean olderFormatDraftIngored) throws IOException {
+    final InputStream is = workflowFilesService.readWorkflowXml(filePath);
+    StreamingOutput streamer = new StreamingOutput() {
+      @Override
+      public void write(OutputStream os) throws IOException,
+        WebApplicationException {
+        IOUtils.copy(is, os);
+        is.close();
+        os.close();
       }
-      return  responseBuilder.build();
-    } catch (IOException e) {
-      return getRespCodeForException(e);
+    };
+    Response.ResponseBuilder responseBuilder = Response.ok(streamer).header(RESPONSE_TYPE, responseType);
+    if (olderFormatDraftIngored) {
+      responseBuilder.header(OLDER_FORMAT_DRAFT_INGORED, Boolean.TRUE.toString());
     }
+    return responseBuilder.build();
+
   }
 
   @GET
@@ -496,12 +434,13 @@ public class OozieProxyImpersonator {
   public Response readWorkflowXml(
     @QueryParam("workflowXmlPath") String workflowPath,@QueryParam("jobType") String jobTypeStr) {
     if (StringUtils.isEmpty(workflowPath)) {
-      throw new RuntimeException("workflowXmlPath can't be empty.");
+      throw new WfmWebException(ErrorCode.INVALID_EMPTY_INPUT);
     }
-
     try {
-      final InputStream is = workflowFilesService
-        .readWorkflowXml(workflowPath);
+      if (!hdfsFileUtils.fileExists(workflowPath)) {
+        throw new WfmWebException(ErrorCode.WORKFLOW_XML_DOES_NOT_EXIST);
+      }
+      final InputStream is = workflowFilesService.readWorkflowXml(workflowPath);
       StreamingOutput streamer = new StreamingOutput() {
         @Override
         public void write(OutputStream os) throws IOException,
@@ -512,22 +451,11 @@ public class OozieProxyImpersonator {
         }
       };
       return Response.ok(streamer).status(200).build();
-    } catch (IOException e) {
-      return getRespCodeForException(e);
-    }
-  }
-
-  private HashMap<String, String> getErrorDetails(String status,
-                                                  String message, Exception ex) {
-    HashMap<String, String> resp = new HashMap<String, String>();
-    resp.put("status", status);
-    if (message != null) {
-      resp.put("message", message);
-    }
-    if (ex != null) {
-      resp.put("stackTrace", ExceptionUtils.getFullStackTrace(ex));
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
-    return resp;
   }
 
   @GET
@@ -538,8 +466,7 @@ public class OozieProxyImpersonator {
         .getPath(), ui.getQueryParameters(), HttpMethod.GET, null);
     } catch (Exception ex) {
       LOGGER.error("Error in GET proxy", ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(getErrorDetailsForException("Oozie", ex)).build();
+      throw new WfmWebException(ex);
     }
   }
 
@@ -548,13 +475,11 @@ public class OozieProxyImpersonator {
   public Response handlePost(String xml, @Context HttpHeaders headers,
                              @Context UriInfo ui) {
     try {
-
       return oozieDelegate.consumeService(headers, ui.getAbsolutePath()
         .getPath(), ui.getQueryParameters(), HttpMethod.POST, xml);
     } catch (Exception ex) {
       LOGGER.error("Error in POST proxy", ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(getErrorDetailsForException("Oozie", ex)).build();
+      throw new WfmWebException(ex);
     }
   }
 
@@ -567,8 +492,7 @@ public class OozieProxyImpersonator {
         .getPath(), ui.getQueryParameters(), HttpMethod.POST, null);
     } catch (Exception ex) {
       LOGGER.error("Error in DELETE proxy", ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(getErrorDetailsForException("Oozie", ex)).build();
+      throw new WfmWebException(ex);
     }
   }
 
@@ -581,22 +505,7 @@ public class OozieProxyImpersonator {
         .getPath(), ui.getQueryParameters(), HttpMethod.PUT, body);
     } catch (Exception ex) {
       LOGGER.error("Error in PUT proxy", ex);
-      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
-        .entity(getErrorDetailsForException("Oozie", ex)).build();
-    }
-  }
-
-  private Map<String, String> getErrorDetailsForException(String component,
-                                                          Exception ex) {
-    String errorCode = component + "exception";
-    String errorMessage = component + " Exception";
-    if (ex instanceof RuntimeException) {
-      Throwable cause = ex.getCause();
-      if (cause instanceof IOException) {
-        errorCode = component + "io.exception";
-        errorMessage = component + "IO Exception";
-      }
+      throw new WfmWebException(ex);
     }
-    return getErrorDetails(errorCode, errorMessage, ex);
   }
 }
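
The shape repeated across the endpoints above, reduced to a minimal sketch; the path and the helper it calls are placeholders, not part of the commit.

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.Response;

import org.apache.oozie.ambari.view.exception.WfmWebException;

public class EndpointErrorHandlingSketch {

  @GET
  @Path("/exampleCheck")             // hypothetical path, for illustration only
  public Response exampleCheck() {
    try {
      doWork();                      // stand-in for the real HDFS / Oozie calls
      return Response.ok().build();
    } catch (WfmWebException ex) {
      throw ex;                      // already typed for the client; pass it through
    } catch (Exception ex) {
      throw new WfmWebException(ex); // anything else becomes the shared error payload
    }
  }

  private void doWork() throws Exception {
    // placeholder body
  }
}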

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
index ef3b508..3355c85 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
@@ -25,13 +25,15 @@ import org.apache.oozie.ambari.view.*;
 import org.apache.oozie.ambari.view.assets.model.ActionAsset;
 import org.apache.oozie.ambari.view.assets.model.ActionAssetDefinition;
 import org.apache.oozie.ambari.view.assets.model.AssetDefintion;
+import org.apache.oozie.ambari.view.exception.ErrorCode;
+import org.apache.oozie.ambari.view.exception.WfmException;
+import org.apache.oozie.ambari.view.exception.WfmWebException;
 import org.apache.oozie.ambari.view.model.APIResult;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.*;
 import javax.ws.rs.core.*;
-import javax.ws.rs.core.Response.Status;
 import java.io.IOException;
 import java.util.*;
 
@@ -65,7 +67,7 @@ public class AssetResource {
       result.setData(assets);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+      throw new WfmWebException(e);
     }
   }
 
@@ -80,7 +82,7 @@ public class AssetResource {
       result.setData(assets);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+      throw new WfmWebException(e);
     }
   }
   @POST
@@ -88,19 +90,20 @@ public class AssetResource {
                             @QueryParam("id") String id, @Context UriInfo ui, String body) {
     try {
       Gson gson = new Gson();
-      AssetDefintion assetDefinition = gson.fromJson(body,
-        AssetDefintion.class);
+      AssetDefintion assetDefinition = gson.fromJson(body, AssetDefintion.class);
       Map<String, String> validateAsset = validateAsset(headers,
         assetDefinition.getDefinition(), ui.getQueryParameters());
       if (!STATUS_OK.equals(validateAsset.get(STATUS_KEY))) {
-        return Response.status(Status.BAD_REQUEST).build();
+        throw new WfmWebException(ErrorCode.ASSET_INVALID_FROM_OOZIE);
       }
       assetService.saveAsset(id, viewContext.getUsername(), assetDefinition);
       APIResult result = new APIResult();
       result.setStatus(APIResult.Status.SUCCESS);
       return Response.ok(result).build();
-    } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
   }
 
@@ -113,43 +116,50 @@ public class AssetResource {
   public Map<String, String> validateAsset(HttpHeaders headers,
                                            String postBody, MultivaluedMap<String, String> queryParams) {
     String workflowXml = oozieUtils.generateWorkflowXml(postBody);
+    Map<String, String> result = new HashMap<>();
+    String tempWfPath = "/tmp" + "/tmpooziewfs/tempwf_" + Math.round(Math.random() * 100000) + ".xml";
     try {
-      Map<String, String> result = new HashMap<>();
-      String tempWfPath = "/tmp" + "/tmpooziewfs/tempwf_" + Math.round(Math.random()*100000) + ".xml";
       hdfsFileUtils.writeToFile(tempWfPath, workflowXml, true);
-      queryParams.put("oozieparam.action", getAsList("dryrun"));
-      queryParams.put("oozieconfig.rerunOnFailure", getAsList("false"));
-      queryParams.put("oozieconfig.useSystemLibPath", getAsList("true"));
-      queryParams.put("resourceManager", getAsList("useDefault"));
-      String dryRunResp = oozieDelegate.submitWorkflowJobToOozie(headers,
-        tempWfPath, queryParams, JobType.WORKFLOW);
-      LOGGER.info(String.format("resp from validating asset=[%s]",
-        dryRunResp));
+    } catch (IOException e) {
+      throw new WfmWebException(e, ErrorCode.FILE_ACCESS_UNKNOWN_ERROR);
+    }
+    queryParams.put("oozieparam.action", getAsList("dryrun"));
+    queryParams.put("oozieconfig.rerunOnFailure", getAsList("false"));
+    queryParams.put("oozieconfig.useSystemLibPath", getAsList("true"));
+    queryParams.put("resourceManager", getAsList("useDefault"));
+    String dryRunResp = oozieDelegate.submitWorkflowJobToOozie(headers,
+      tempWfPath, queryParams, JobType.WORKFLOW);
+    LOGGER.info(String.format("resp from validating asset=[%s]", dryRunResp));
+    try {
       hdfsFileUtils.deleteFile(tempWfPath);
-      if (dryRunResp != null && dryRunResp.trim().startsWith("{")) {
-        JsonElement jsonElement = new JsonParser().parse(dryRunResp);
-        JsonElement idElem = jsonElement.getAsJsonObject().get("id");
-        if (idElem != null) {
-          result.put(STATUS_KEY, STATUS_OK);
-        } else {
-          result.put(STATUS_KEY, STATUS_FAILED);
-          result.put(MESSAGE_KEY, dryRunResp);
-        }
+    } catch (IOException e) {
+      throw new WfmWebException(e, ErrorCode.FILE_ACCESS_UNKNOWN_ERROR);
+    }
+    if (dryRunResp != null && dryRunResp.trim().startsWith("{")) {
+      JsonElement jsonElement = new JsonParser().parse(dryRunResp);
+      JsonElement idElem = jsonElement.getAsJsonObject().get("id");
+      if (idElem != null) {
+        result.put(STATUS_KEY, STATUS_OK);
       } else {
         result.put(STATUS_KEY, STATUS_FAILED);
         result.put(MESSAGE_KEY, dryRunResp);
       }
-      return result;
-    } catch (IOException e) {
-      throw new RuntimeException(e);
+    } else {
+      result.put(STATUS_KEY, STATUS_FAILED);
+      result.put(MESSAGE_KEY, dryRunResp);
     }
+    return result;
   }
 
   @GET
   @Path("/assetNameAvailable")
   public Response assetNameAvailable(@QueryParam("name") String name){
-    boolean available=assetService.isAssetNameAvailable(name);
-    return Response.ok(available).build();
+    try {
+      boolean available = assetService.isAssetNameAvailable(name);
+      return Response.ok(available).build();
+    }catch (Exception e){
+      throw new WfmWebException(e);
+    }
   }
 
   @GET
@@ -162,7 +172,7 @@ public class AssetResource {
       result.setData(assetDefinition);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+      throw new WfmWebException(e);
     }
   }
 
@@ -170,14 +180,13 @@ public class AssetResource {
   @Path("/definition/id}")
   public Response getAssetDefinition(@PathParam("defnitionId") String id) {
     try {
-      ActionAssetDefinition assetDefinition = assetService
-        .getAssetDefinition(id);
+      ActionAssetDefinition assetDefinition = assetService.getAssetDefinition(id);
       APIResult result = new APIResult();
       result.setStatus(APIResult.Status.SUCCESS);
       result.setData(assetDefinition);
       return Response.ok(result).build();
     } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+      throw new WfmWebException(e);
     }
   }
 
@@ -187,19 +196,19 @@ public class AssetResource {
     try {
       ActionAsset asset = assetService.getAsset(id);
       if (asset == null) {
-        throw new RuntimeException("Asset doesnt exist");
+        throw new WfmWebException(ErrorCode.ASSET_NOT_EXIST);
       }
       if (!viewContext.getUsername().equals(asset.getOwner())){
-        throw new RuntimeException(
-          "Dont have permission to delete this asset");
+        throw new WfmWebException(ErrorCode.PERMISSION_ERROR);
       }
       assetService.deleteAsset(id);
       APIResult result = new APIResult();
       result.setStatus(APIResult.Status.SUCCESS);
       return Response.ok(result).build();
-    } catch (Exception e) {
-      throw new ServiceFormattedException(e);
+    } catch (WfmWebException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
     }
   }
-
 }
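
The acceptance rule validateAsset now applies to the Oozie dry-run reply, isolated as a sketch. Gson is already in use in this file; the STATUS_OK/STATUS_FAILED values and key names are constants of AssetResource whose literal values are assumptions here.

import java.util.HashMap;
import java.util.Map;

import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

public class DryRunReplySketch {

  private static final String STATUS_KEY  = "status";   // assumed literals; the real
  private static final String MESSAGE_KEY = "message";  // constants live in AssetResource
  private static final String STATUS_OK     = "ok";
  private static final String STATUS_FAILED = "failed";

  // A dry-run reply counts as valid only if it is JSON and carries an "id" element;
  // otherwise the reply text is reported back as the failure message.
  public static Map<String, String> classify(String dryRunResp) {
    Map<String, String> result = new HashMap<>();
    if (dryRunResp != null && dryRunResp.trim().startsWith("{")) {
      JsonElement json = new JsonParser().parse(dryRunResp);
      if (json.getAsJsonObject().get("id") != null) {
        result.put(STATUS_KEY, STATUS_OK);
        return result;
      }
    }
    result.put(STATUS_KEY, STATUS_FAILED);
    result.put(MESSAGE_KEY, dryRunResp);
    return result;
  }
}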

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
index a0aa234..e1a5808 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsManagerResource.java
@@ -26,8 +26,10 @@ import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Response;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.oozie.ambari.view.exception.WfmWebException;
 
 public class WorkflowsManagerResource {
 	private final WorkflowManagerService workflowManagerService;
@@ -38,19 +40,27 @@ public class WorkflowsManagerResource {
 		this.workflowManagerService=new WorkflowManagerService(viewContext);
 	}
 
-	@GET
-	public Map<String,Object> getWorkflows(){
-	    HashMap<String,Object> result=new HashMap<>();
-	    result.put("wfprojects", workflowManagerService.getAllWorkflows(viewContext.getUsername()));
-	    return result;
-	}
-	
-	
-	@DELETE
+  @GET
+  public Response getWorkflows() {
+    try {
+      HashMap<String, Object> result = new HashMap<>();
+      result.put("wfprojects", workflowManagerService.getAllWorkflows(viewContext.getUsername()));
+      return Response.ok(result).build();
+    } catch (Exception ex) {
+      throw new WfmWebException(ex);
+    }
+  }
+
+
+  @DELETE
 	@Path("/{projectId}")
-	public void deleteWorkflow( @PathParam("projectId") String id,
-            @DefaultValue("false") @QueryParam("deleteDefinition") Boolean deleteDefinition){
-	    workflowManagerService.deleteWorkflow(id,deleteDefinition);
+	public Response deleteWorkflow(@PathParam("projectId") String id,
+                                 @DefaultValue("false") @QueryParam("deleteDefinition") Boolean deleteDefinition){
+	  try{
+      workflowManagerService.deleteWorkflow(id,deleteDefinition);
+      return Response.ok().build();
+    }catch (Exception ex) {
+      throw new WfmWebException(ex);
+    }
 	}
-	
 }
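
For completeness, the new shape of the delete endpoint assembled as a compilable sketch: it now returns an explicit JAX-RS Response and surfaces failures as WfmWebException instead of returning void. The constructor wiring is simplified for illustration; the real class builds WorkflowManagerService from the ViewContext as shown above.

import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;

import org.apache.oozie.ambari.view.exception.WfmWebException;
import org.apache.oozie.ambari.view.workflowmanager.WorkflowManagerService;

public class DeleteWorkflowSketch {

  private final WorkflowManagerService workflowManagerService;

  public DeleteWorkflowSketch(WorkflowManagerService workflowManagerService) {
    this.workflowManagerService = workflowManagerService;
  }

  @DELETE
  @Path("/{projectId}")
  public Response deleteWorkflow(@PathParam("projectId") String id,
                                 @DefaultValue("false") @QueryParam("deleteDefinition") Boolean deleteDefinition) {
    try {
      workflowManagerService.deleteWorkflow(id, deleteDefinition);
      return Response.ok().build();      // explicit 200 instead of a void return
    } catch (Exception ex) {
      throw new WfmWebException(ex);     // failures reach the client as the shared error payload
    }
  }
}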

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
index 3ccbc07..e94d51a 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
@@ -156,10 +156,12 @@ export default Ember.Component.extend(Ember.Evented, Validations, {
       }
       this.set('bundleFilePath', filePath);
       this.set("isImporting", false);
-    }.bind(this)).catch(function(e){
+    }.bind(this)).catch(function(data){
+      console.error(data);
+      this.set("errorMsg", "There is some problem while importing.");
       this.set("isImporting", false);
       this.set("isImportingSuccess", false);
-      throw new Error(e);
+      this.set("data", data);
     }.bind(this));
   },
   getBundleFromJSON(draftBundle){
@@ -363,7 +365,7 @@ export default Ember.Component.extend(Ember.Evented, Validations, {
       }.bind(this)).catch(function(e){
         this.$('#loading').hide();
         this.get("errors").pushObject({'message' : 'Could not process coordinator from ' + e.path});
-        throw new Error(e.trace);
+        throw new Error(e);
       }.bind(this));
     },
     preview(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
index bbd619d..4a57e37 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/coord-config.js
@@ -274,10 +274,12 @@ export default Ember.Component.extend(Validations, Ember.Evented, {
       }
       this.set('coordinatorFilePath', filePath);
       this.set("isImporting", false);
-    }.bind(this)).catch(function(e){
+    }.bind(this)).catch(function(data){
+      console.error(data);
+      this.set("errorMsg", "There is some problem while importing.");
       this.set("isImporting", false);
       this.set("isImportingSuccess", false);
-      throw new Error(e);
+      this.set("data", data);
     }.bind(this));
   },
   getCoordinatorFromJSON(draftCoordinator){

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
index 7a7c38d..fdb4f5e 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-errors.js
@@ -18,4 +18,53 @@
 import Ember from 'ember';
 
 export default Ember.Component.extend({
+  showingStackTrace: false,
+  hasErrorMsg : Ember.computed('errorMsg', function() {
+    return !Ember.isBlank(this.get("errorMsg"));
+  }),
+  errorMsgDetails : Ember.computed('data.responseText', function() {
+    var jsonResponse = this.getparsedResponse();
+    if (jsonResponse.message) {
+      if (jsonResponse.message.indexOf('Permission denied') >= 0) {
+        return "Permission Denied";
+      }
+      return jsonResponse.message;
+    }
+    return "";
+  }),
+  stackTrace : Ember.computed('data.responseText', function() {
+      var jsonResponse = this.getparsedResponse();
+      var stackTraceMsg = jsonResponse.stackTrace;
+      if(!stackTraceMsg){
+        return "";
+      }
+      if (stackTraceMsg instanceof Array) {
+        return stackTraceMsg.join("").replace(/\tat /g, '&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
+      } else {
+        return stackTraceMsg.replace(/\tat /g, '<br/>&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
+      }
+  }),
+  isStackTraceAvailable : Ember.computed('stackTrace', function(){
+    return this.get('stackTrace') && this.get('stackTrace').length ? true : false;
+  }),
+  getparsedResponse() {
+    var response = this.get('data.responseText');
+    if (response) {
+      try {
+        return JSON.parse(response);
+      } catch(err){
+        return "";
+      }
+    }
+    return "";
+  },
+
+  actions: {
+    showStackTrace(){
+      this.set("showingStackTrace", !this.get("showingStackTrace"));
+    },
+    closeStackTrace(){
+      this.set("showingStackTrace", false);
+    }
+  }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index f97add8..de72c6d 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -94,13 +94,13 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   isWorkflowImporting: false,
   isAssetPublishing: false,
   errorMsg: "",
+  data : {
+    "responseText": ""
+  },
   shouldPersist : false,
   useCytoscape: Constants.useCytoscape,
   cyOverflow: {},
   clipboard : Ember.computed.alias('clipboardService.clipboard'),
-  isStackTraceVisible: false,
-  isStackTraceAvailable: false,
-  stackTrace:"",
   showingStreamImport:false,
   fileInfo:Ember.Object.create(),
   isDraft: false,
@@ -310,24 +310,6 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
   doValidation(){
     this.validate();
   },
-  getStackTrace(data){
-    if(data){
-     try{
-      var stackTraceMsg = JSON.parse(data).stackTrace;
-      if(!stackTraceMsg){
-        return "";
-      }
-     if(stackTraceMsg instanceof Array){
-       return stackTraceMsg.join("").replace(/\tat /g, '&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
-     } else {
-       return stackTraceMsg.replace(/\tat /g, '<br/>&nbsp;&nbsp;&nbsp;&nbsp;at&nbsp;');
-     }
-     } catch(err){
-       return "";
-     }
-    }
-    return "";
-  },
   importWorkflow(filePath){
     var self = this;
     this.set("isWorkflowImporting", true);
@@ -343,8 +325,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       this.set("workflowFilePath", filePath);
     }.bind(this)).catch(function(data){
       console.error(data);
-      self.set("errorMsg", "There is some problem while importing.Please try again.");
-      self.showingErrorMsgInDesigner(data);
+      self.set("errorMsg", "There is some problem while importing.");
+      self.set("data", data);
       self.set("isWorkflowImporting", false);
     });
   },
@@ -491,8 +473,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     exportActionNodeXmlDefered.promise.then(function(data){
       self.set("isAssetPublishing", false);
     }.bind(this)).catch(function(data){
-      self.set("errorMsg", "There is some problem while publishing asset. Please try again.");
-      self.showingErrorMsgInDesigner(data);
+      self.set("errorMsg", "There is some problem while publishing asset.");
+      self.set("data", data);
       self.set("isAssetPublishing", false);
     });
 
@@ -739,15 +721,6 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       this.set("showingWorkflowConfigProps",true);
     }
   },
-  showingErrorMsgInDesigner(data){
-      var self = this, stackTraceMsg = self.getStackTrace(data.responseText);
-      if(stackTraceMsg.length){
-        self.set("stackTrace", stackTraceMsg);
-        self.set("isStackTraceAvailable", true);
-      } else {
-        self.set("isStackTraceAvailable", false);
-      }
-  },
   isDraftExists(path){
     var deferred = Ember.RSVP.defer(), url, self = this;
     if(!path){
@@ -833,12 +806,6 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       });
       reader.readAsText(file);
     },
-    showStackTrace(){
-      this.set("isStackTraceVisible", true);
-    },
-    hideStackTrace(){
-      this.set("isStackTraceVisible", false);
-    },
     showWorkflowSla (value) {
       this.set('showWorkflowSla', value);
     },
@@ -1029,9 +996,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         actionSettingsXmlDefered.promise.then(function(data){
           this.importActionSettingsFromString(data);
         }.bind(this)).catch(function(data){
-          console.error(data);
-          self.set("errorMsg", "There is some problem while importing asset.Please try again.");
-          self.showingErrorMsgInDesigner(data);
+          self.set("errorMsg", "There is some problem while importing asset.");
+          self.set("data", data);
         });
       }
     },
@@ -1047,9 +1013,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         actionSettingsXmlDefered.promise.then(function(data){
           this.importActionNodeFromString(data);
         }.bind(this)).catch(function(data){
-          console.error(data);
-          self.set("errorMsg", "There is some problem while importing asset. Please try again.");
-          self.showingErrorMsgInDesigner(data);
+          self.set("errorMsg", "There is some problem while importing asset.");
+          self.set("data", data);
         });
       }
     },
@@ -1184,9 +1149,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       saveAssetConfigDefered.promise.then(function(data){
         self.set("isAssetPublishing", false);
       }.bind(this)).catch(function(data){
+        self.set("errorMsg", "There is some problem while saving asset.");
+        self.set("data", data);
         self.set("isAssetPublishing", false);
-        self.set("errorMsg", "There is some problem while saving asset. Please try again.");
-        self.showingErrorMsgInDesigner(data);
       });
     },
     showAssetList(value) {
@@ -1204,9 +1169,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         self.importActionSettingsFromString(importedAsset.definition);
         self.set("isAssetImporting", false);
       }.bind(this)).catch(function(data){
+        self.set("errorMsg", "There is some problem while importing asset.");
+        self.set("data", data);
         self.set("isAssetImporting", false);
-        self.set("errorMsg", "There is some problem while importing asset. Please try again.");
-        self.showingErrorMsgInDesigner(data);
       });
     },
     showAssetNodeList(value) {
@@ -1224,9 +1189,9 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
         self.importActionNodeFromString(importedAsset.definition);
         self.set("isAssetImporting", false);
       }.bind(this)).catch(function(data){
+        self.set("errorMsg", "There is some problem while importing asset.");
+        self.set("data", data);
         self.set("isAssetImporting", false);
-        self.set("errorMsg", "There is some problem while importing asset. Please try again.");
-        self.showingErrorMsgInDesigner(data);
       });
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
index e98d182..a424049 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
@@ -605,9 +605,10 @@ input:invalid {
 #configureJob .modal-dialog,
 #asset-delete-confirm-dialog .modal-dialog,
 #projectsList .modal-dialog,
-#previewModal .modal-dialog {
+#previewModal .modal-dialog,
+#stack_trace_dialog .modal-dialog {
     width: @modalDialogWidth;
-height: 100vh;
+    height: 100vh;
 }
 
 #collapseOne{
@@ -1547,15 +1548,11 @@ height: 100vh;
   padding-left: 0px;
   padding-right: 0px;
 }
-#stackTrace{
-  white-space: pre-wrap;
-  max-width: 100%;
-  max-height: 400px;
-  overflow: scroll;
-}
+
 .jobIdClass {
   width: 50px;
 }
+
 .width50 {
     white-space: nowrap;
     width: 150px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
index ca58431..2d374a5 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/bundle-config.hbs
@@ -72,7 +72,7 @@
     <div id='loading'>
       {{spin-spinner lines=13 length=20 width=10}}
     </div>
-    {{designer-errors errors=errors}}
+    {{designer-errors errors=errors validationErrors=validationErrors errorMsg=errorMsg data=data}}
     <form class="form-horizontal">
       <div class="col-sm-12 paddingtop10">
         <div class="col-sm-8 centralize-panel">
@@ -100,7 +100,7 @@
                   <li class="list-group-item">No Coordinators Configured.</li>
                   {{/each}}
                 </ul>
-                {{#field-error model=this field='bundle.coordinators' showErrorMessage=true}}{{/field-error}}
+                {{#field-error model=this field='bundle.coordinators' showErrorMessage=showErrorMessage}}{{/field-error}}
 
               {{#if coordinatorCreateMode}}
               {{#bundle-coord-config coordinator=currentCoordinator openTab="openTab" openFileBrowser="openFileBrowser" add="addCoordinator" cancel="cancelCoordinatorOperation" createMode=coordinatorCreateMode}}{{/bundle-coord-config}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
index 7b607ca..7db5ce2 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/coord-config.hbs
@@ -93,7 +93,7 @@
   {{spin-spinner lines=13 length=20 width=10}}
 </div>
 <div class="container-fluid">
-  {{designer-errors errors=errors}}
+  {{designer-errors errors=errors validationErrors=validationErrors errorMsg=errorMsg data=data}}
   <form class="form-horizontal">
     <div class="col-sm-12 paddingtop10">
       <div class="col-sm-8 centralize-panel">

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
index 8438255..00cb8a6 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-errors.hbs
@@ -34,4 +34,21 @@
       {{/if}}
     </div>
   {{/if}}
+
+  {{#if hasErrorMsg}}
+    <div id="loader">
+        <div id="alert"class="alert alert-danger alert-dismissible workflow-error" role="alert">
+            {{errorMsg}}
+            <div id="errorMsgDetails">
+              {{errorMsgDetails}}
+              {{#if isStackTraceAvailable}}
+                <a href="#" class="action-link" {{action "showStackTrace"}}>Details</a>
+              {{/if}}
+            </div>
+        </div>
+    </div>
+  {{/if}}
 </div>
+{{#if showingStackTrace}}
+  {{#stack-trace-dialog title="Stack Trace" stackTrace=stackTrace closeStackTrace="closeStackTrace"}}{{/stack-trace-dialog}}
+{{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/890ad905/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index 1a73421..4ff9d87 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -138,7 +138,7 @@
   {{/if}}
   <div  id="content" class="panel panel-default designer-main-panel col-xs-20">
     <div class="designer-panel designer-canvas">
-      {{designer-errors errors=errors validationErrors=validationErrors}}
+      {{designer-errors errors=errors validationErrors=validationErrors errorMsg=errorMsg data=data}}
       {{#if undoAvailable}}
         <div id="alert"class="alert alert-warning workflow-error" role="alert">
           {{#if (eq undoType 'nodeDeleted')}}
@@ -157,22 +157,6 @@
           </div>
       </div>
       {{/if}}
-      {{#if (not (eq errorMsg ""))}}
-        <div id="loader">
-            <div id="alert"class="alert alert-danger alert-dismissible workflow-error" role="alert">
-                {{errorMsg}}
-                {{#if isStackTraceAvailable}}
-                  {{#if isStackTraceVisible}}
-                    <a href="#" class="action-link" {{action "hideStackTrace"}}>Hide Log</a>
-                    <div id="stackTrace">{{{stackTrace}}}</div>
-                  {{/if}}
-                  {{#unless isStackTraceVisible}}
-                    <a href="#" class="action-link" {{action "showStackTrace"}}>Show Log</a>
-                  {{/unless}}
-                {{/if}}
-            </div>
-        </div>
-      {{/if}}
       {{#if isAssetPublishing}}
         <div id="loader">
             <div id="alert"class="alert alert-info alert-dismissible workflow-error" role="alert">


[29/50] ambari git commit: AMBARI-20107. Action node shows unsupported properties even though there are none in workflow manager. (Padma Priya N via gauravn7)

Posted by nc...@apache.org.
AMBARI-20107. Action node shows unsupported properties even though there are none in workflow manager. (Padma Priya N via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ba470c18
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ba470c18
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ba470c18

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: ba470c186875be371152e2a3ce37fb3c9a383f5a
Parents: f936bcf
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 23 10:58:02 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 23 10:58:02 2017 +0530

----------------------------------------------------------------------
 .../ui/app/components/workflow-action-editor.js | 40 ++++++++++++++++----
 .../components/workflow-action-editor.hbs       |  5 ++-
 2 files changed, 36 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ba470c18/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
index d09de7d..e19646d 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
@@ -62,8 +62,18 @@ export default Ember.Component.extend( Ember.Evented,{
   saveClicked : false,
   unsupportedPropertiesXml : Ember.computed('actionModel.unsupportedProperties', {
     get(key){
+      let unsupportedPropertiesXml;
+      if(!this.get('actionModel.unsupportedProperties')){
+        return unsupportedPropertiesXml;
+      }
       let x2js = new X2JS();
-      return vkbeautify.xml(x2js.json2xml_str(this.get('actionModel.unsupportedProperties')));
+      let unsupportedProperties = Ember.copy(this.get('actionModel.unsupportedProperties'));
+      delete unsupportedProperties['@id'];
+      delete unsupportedProperties.__jsogObjectId;
+      if(!Ember.isEmpty(Object.keys(unsupportedProperties))){
+        unsupportedPropertiesXml = vkbeautify.xml(x2js.json2xml_str(this.get('actionModel.unsupportedProperties')));
+      }
+      return unsupportedPropertiesXml;
     },
     set(key, value) {
       let x2js = new X2JS();
@@ -75,12 +85,15 @@ export default Ember.Component.extend( Ember.Evented,{
       return value;
     }
   }),
+  containsUnsupportedProperties : Ember.computed('unsupportedPropertiesXml', function(){
+    return this.get('unsupportedPropertiesXml') && this.get('unsupportedPropertiesXml').length > 0;
+  }),
   actionXml : Ember.computed('actionModel', {
     get(key) {
       let x2js = new X2JS();
       var startTag = `<${this.get('actionType')}`;
       Object.keys(this.get('actionModel')).forEach(key => {
-        if(key.startsWith('_')){
+        if(key.startsWith('_') && key !== '__jsogObjectId'){
           startTag = `${startTag} ${key.substr(1)}="${this.get('actionModel')[key]}"`;
         }
       });
@@ -150,11 +163,6 @@ export default Ember.Component.extend( Ember.Evented,{
       delete this.get('actionModel').slaInfo;
       delete this.get('actionModel').slaEnabled;
     }
-    if(this.get('actionModel.unsupportedProperties') && !Ember.isEmpty(Object.keys(this.get('actionModel.unsupportedProperties')))){
-      this.set('containsUnsupportedProperties', true);
-    }else{
-      this.set('containsUnsupportedProperties', false);
-    }
   }.on('init'),
   initialize : function(){
     this.$('#action_properties_dialog').modal({
@@ -203,6 +211,19 @@ export default Ember.Component.extend( Ember.Evented,{
       }
     });
   },
+  validateDecisionNode(){
+    let containsOtherNodes = false;
+    this.get('actionModel').forEach((model)=>{
+      if(model.node.type !== 'kill'){
+        containsOtherNodes = true;
+      }
+    });
+    if(!containsOtherNodes){
+      this.get('errors').pushObject({message:'Atleast one of the decision branches should transition to a node other than a kill node.'});
+    }else{
+      this.get('errors').clear();
+    }
+  },
   actions : {
     closeEditor (){
       this.sendAction('close');
@@ -237,6 +258,11 @@ export default Ember.Component.extend( Ember.Evented,{
     },
     registerChild (name, context){
       this.get('childComponents').set(name, context);
+    },
+    showUnsupportedProperties(){
+      this.$('#action_properties_dialog .modal-body').animate({
+        scrollTop: this.$("#unsupported-props").offset().top
+      }, 'fast');
     }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba470c18/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs
index fae2d3d..788916b 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs
@@ -32,11 +32,12 @@
         <div>
             <form class="form-horizontal" id="action_properties">
               {{#if (eq nodeType 'decision')}}
-                {{#decision-config actionModel=actionModel killNodes=killNodes register="registerChild"}}{{/decision-config}}
+                {{designer-errors errors=errors}}
+                {{#decision-config currentNode=currentNode actionModel=actionModel killNodes=killNodes register="registerChild"}}{{/decision-config}}
               {{/if}}
               {{#if (eq nodeType 'action')}}
                 {{#if containsUnsupportedProperties}}
-                  <span class="unsupported-elt-warning"> <p><a href='#unsupported-props'><i class="fa fa-exclamation-triangle" aria-hidden="true"></i>Action contains elements that are not currently supported by the designer.</a></p></span>
+                  <span class="unsupported-elt-warning"> <p><a href {{action "showUnsupportedProperties"}}><i class="fa fa-exclamation-triangle" aria-hidden="true"></i>Action contains elements that are not currently supported by the designer.</a></p></span>
                 {{/if}}
                 {{#if (eq actionType 'java')}}
                   {{#java-action actionModel=actionModel transition=transition killNodes=killNodes openFileBrowser="openFileBrowser" register="registerChild" addKillNode="addKillNode" currentNode=currentNode credentials=credentials}}{{/java-action}}
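
Editor's note: the fix above hinges on stripping the designer's own bookkeeping keys ("@id", "__jsogObjectId") from the action model before deciding whether it really contains unsupported XML. A minimal Java sketch of that check, using a plain Map in place of the Ember model (class and method names here are illustrative, not Ambari code):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class UnsupportedPropertiesCheck {

  // Keys the designer adds for its own bookkeeping; they must not count as
  // unsupported content when deciding whether to warn the user.
  private static final Set<String> INTERNAL_KEYS =
      new HashSet<>(Arrays.asList("@id", "__jsogObjectId"));

  /** True only if the model still has user-visible entries after dropping internal keys. */
  static boolean containsUnsupportedProperties(Map<String, Object> unsupported) {
    if (unsupported == null) {
      return false;
    }
    Map<String, Object> copy = new HashMap<>(unsupported); // work on a copy, like Ember.copy
    copy.keySet().removeAll(INTERNAL_KEYS);
    return !copy.isEmpty();
  }

  public static void main(String[] args) {
    Map<String, Object> model = new HashMap<>();
    model.put("@id", "42");
    model.put("__jsogObjectId", "7");
    System.out.println(containsUnsupportedProperties(model)); // false -> no warning shown

    model.put("custom-element", "<foo/>");
    System.out.println(containsUnsupportedProperties(model)); // true -> warning shown
  }
}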


[31/50] ambari git commit: AMBARI-20127. Hive 2: "insert udf" opens empty drop down (pallavkul)

Posted by nc...@apache.org.
AMBARI-20127. Hive 2: "insert udf" opens empty drop down (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/03d0587e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/03d0587e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/03d0587e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 03d0587e2e29d7f9b28844b7dfe2042813947dd6
Parents: 5186db0
Author: pallavkul <pa...@gmail.com>
Authored: Thu Feb 23 11:40:28 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Thu Feb 23 11:40:28 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/routes/queries/query.js   | 16 ++++++++++++++++
 .../resources/ui/app/templates/queries/query.hbs    |  4 +---
 2 files changed, 17 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/03d0587e/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 64f38ab..5bed2c6 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -54,6 +54,22 @@ export default Ember.Route.extend(UILoggerMixin, {
       this.controller.set('fileResourceList', fileResourceList);
     });
 
+    this.store.findAll('udf').then((data) => {
+      let allUDFList = [];
+      data.forEach(x => {
+        let localUDF = {'id': x.get('id'),
+          'name': x.get('name'),
+          'classname': x.get('classname'),
+          'fileResource': x.get('fileResource'),
+          'owner': x.get('owner')
+        };
+        allUDFList.push(localUDF);
+      });
+      this.controller.set('allUDFList', allUDFList);
+    });
+
+
+
     this.store.findAll('setting').then((data) => {
       let localStr = '';
       data.forEach(x => {

http://git-wip-us.apache.org/repos/asf/ambari/blob/03d0587e/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index 4fd3ce201..9be3873 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -32,10 +32,8 @@
 
         <button class="btn btn-default" {{action "openWorksheetModal" }}>{{fa-icon "save"}} Save As</button>
         <div class="btn-group">
-          <button class="btn btn-default" type="button" data-toggle="dropdown">Insert UDF
+          <button class="btn btn-default" type="button" data-toggle="dropdown" disabled={{ not allUDFList.length }}>Insert UDF
             <span class="caret"></span></button>
-
-
             <ul class="dropdown-menu">
               {{#each fileResourceList as |fileResource|}}
                 {{fileresource-item fileResource=fileResource createQuery='createQuery'}}


[16/50] ambari git commit: AMBARI-20068. Getting Internal Server Error (500) on services API while trying to start all services with at least one component in INSTALL_FAILED state (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-20068. Getting Internal Server Error (500) on services API while trying to start all services with at least one component in INSTALL_FAILED state (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/efbd66b5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/efbd66b5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/efbd66b5

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: efbd66b56fa363462ff70660ec1d487457120337
Parents: 6baf387
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Wed Feb 22 12:54:55 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Wed Feb 22 12:54:55 2017 +0200

----------------------------------------------------------------------
 .../server/controller/internal/ServiceResourceProvider.java      | 2 +-
 .../server/controller/internal/UpgradeResourceProvider.java      | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/efbd66b5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
index 99a81c1..a1c53a7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
@@ -781,7 +781,7 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
           if ( ambariMetaInfo.getComponent(
               sid.getStackName(), sid.getStackVersion(), sc.getServiceName(),
               sch.getServiceComponentName()).isMaster()) {
-            throw new AmbariException(error);
+            throw new IllegalArgumentException(error);
           } else {
             LOG.warn("Ignoring: " + error);
             continue;

http://git-wip-us.apache.org/repos/asf/ambari/blob/efbd66b5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 9ecb774..13a6c36 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -1984,8 +1984,8 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
       if( null != existingUpgrade ){
         throw new AmbariException(
             String.format("Unable to perform %s as another %s (request ID %s) is in progress.",
-                direction.getText(false), direction.getText(false),
-                existingUpgrade.getRequestId().longValue()));
+                direction.getText(false), existingUpgrade.getDirection().getText(false),
+                existingUpgrade.getRequestId()));
       }
 
       // skip this check if it's a downgrade or we are instructed to skip it
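
Editor's note: two separate fixes are bundled here. A precondition the caller can correct now raises an unchecked IllegalArgumentException instead of AmbariException (so, per the JIRA title, the services API stops answering that case with a 500), and the "another upgrade in progress" message now reports the existing upgrade's own direction instead of repeating the requested one. A hedged sketch of both ideas outside Ambari's classes (the helper names and the small record type are illustrative):

public class UpgradeChecks {

  /** Stand-in for the persisted upgrade entity; fields are illustrative only. */
  static final class ExistingUpgrade {
    final String direction; // e.g. "upgrade" or "downgrade"
    final long requestId;
    ExistingUpgrade(String direction, long requestId) {
      this.direction = direction;
      this.requestId = requestId;
    }
  }

  /** A user-correctable state throws an unchecked exception the API layer can report as a client error. */
  static void rejectIfMasterNotStartable(boolean isMaster, String error) {
    if (isMaster) {
      throw new IllegalArgumentException(error); // previously a checked AmbariException
    }
    // non-master components are only logged and skipped, as in ServiceResourceProvider
  }

  /** Corrected message: the second placeholder is filled from the *existing* upgrade. */
  static String inProgressMessage(String requestedDirection, ExistingUpgrade existing) {
    return String.format("Unable to perform %s as another %s (request ID %s) is in progress.",
        requestedDirection, existing.direction, existing.requestId);
  }

  public static void main(String[] args) {
    System.out.println(inProgressMessage("upgrade", new ExistingUpgrade("downgrade", 17L)));
    // Unable to perform upgrade as another downgrade (request ID 17) is in progress.
  }
}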


[39/50] ambari git commit: AMBARI-20082. Wizard 'Assign Master ' step is showing inconsistent Select view. Additional changes. (akovalenko)

Posted by nc...@apache.org.
AMBARI-20082. Wizard 'Assign Master ' step is showing inconsistent Select view. Additional changes. (akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/98f40e32
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/98f40e32
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/98f40e32

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 98f40e324ed5594384efc8dc8e43fae9ff0a9d34
Parents: 6572b16
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Thu Feb 23 14:59:22 2017 +0200
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Thu Feb 23 15:32:45 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/styles/wizard.less | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/98f40e32/ambari-web/app/styles/wizard.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/wizard.less b/ambari-web/app/styles/wizard.less
index 3855c9a..45d9e28 100644
--- a/ambari-web/app/styles/wizard.less
+++ b/ambari-web/app/styles/wizard.less
@@ -411,11 +411,15 @@
       color: #ccc;
     }
     .dropdown {
-      button .selected-item {
-        width: 95%;
-        margin-right: 5px;
-        float: left;
-        overflow: hidden;
+      button {
+        width: 100%;
+        .selected-item {
+          width: 95%;
+          margin-right: 5px;
+          float: left;
+          overflow: hidden;
+          text-overflow: ellipsis;
+        }
       }
     }
   }


[35/50] ambari git commit: AMBARI-20063. Removing secure ACLs from Kafka znodes during dekerberization (Attila Magyar via adoroszlai)

Posted by nc...@apache.org.
AMBARI-20063. Removing secure ACLs from Kafka znodes during dekerberization (Attila Magyar via adoroszlai)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/83cdcea5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/83cdcea5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/83cdcea5

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 83cdcea5fb2c5bee698b2f070489a39ed4b3df40
Parents: 1128732
Author: Attila Magyar <am...@hortonworks.com>
Authored: Thu Feb 23 11:14:03 2017 +0100
Committer: Attila Doroszlai <ad...@hortonworks.com>
Committed: Thu Feb 23 11:14:03 2017 +0100

----------------------------------------------------------------------
 .../0.1.0.2.3/package/scripts/metadata_server.py     |  5 ++---
 .../ATLAS/0.1.0.2.3/package/scripts/params.py        |  1 +
 .../KAFKA/0.8.1/package/scripts/kafka_broker.py      | 15 ++++++++++++++-
 .../KAFKA/0.8.1/package/scripts/params.py            |  2 ++
 4 files changed, 19 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/83cdcea5/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
index ad3270e..3c62243 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
@@ -154,14 +154,13 @@ class MetadataServer(Script):
 
   def disable_security(self, env):
     import params
-    if not params.stack_supports_zk_security:
-      Logger.info("Stack doesn't support zookeeper security")
-      return
     if not params.zookeeper_quorum:
       Logger.info("No zookeeper connection string. Skipping reverting ACL")
       return
     zkmigrator = ZkMigrator(params.zookeeper_quorum, params.java_exec, params.java64_home, params.atlas_jaas_file, params.metadata_user)
     zkmigrator.set_acls(params.zk_root if params.zk_root.startswith('/') else '/' + params.zk_root, 'world:anyone:crdwa')
+    if params.atlas_kafka_group_id:
+      zkmigrator.set_acls(format('/consumers/{params.atlas_kafka_group_id}'), 'world:anyone:crdwa')
 
   def status(self, env):
     import status_params

http://git-wip-us.apache.org/repos/asf/ambari/blob/83cdcea5/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index e270733..a476e77 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -83,6 +83,7 @@ java_version = expect("/hostLevelParams/java_version", int)
 
 zk_root = default('/configurations/application-properties/atlas.server.ha.zookeeper.zkroot', '/apache_atlas')
 stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
+atlas_kafka_group_id = default('/configurations/application-properties/atlas.kafka.hook.group.id', None)
 
 if security_enabled:
   _hostname_lowercase = config['hostname'].lower()

http://git-wip-us.apache.org/repos/asf/ambari/blob/83cdcea5/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
index 0901730..2f6bfaa 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
@@ -28,7 +28,6 @@ from resource_management.libraries.functions.check_process_status import check_p
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.show_logs import show_logs
-from resource_management.libraries.functions.default import default
 from kafka import ensure_base_directories
 
 import upgrade
@@ -111,6 +110,20 @@ class KafkaBroker(Script):
           action = "delete"
     )
 
+  def disable_security(self, env):
+    import params
+    if not params.zookeeper_connect:
+      Logger.info("No zookeeper connection string. Skipping reverting ACL")
+      return
+    if not params.secure_acls:
+      Logger.info("The zookeeper.set.acl is false. Skipping reverting ACL")
+      return
+    Execute(
+      "{0} --zookeeper.connect {1} --zookeeper.acl=unsecure".format(params.kafka_security_migrator, params.zookeeper_connect), \
+      user=params.kafka_user, \
+      environment={ 'JAVA_HOME': params.java64_home }, \
+      logoutput=True, \
+      tries=3)
 
   def status(self, env):
     import status_params

http://git-wip-us.apache.org/repos/asf/ambari/blob/83cdcea5/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index 1d3a195..b338add 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -102,6 +102,8 @@ kafka_hosts.sort()
 
 zookeeper_hosts = config['clusterHostInfo']['zookeeper_hosts']
 zookeeper_hosts.sort()
+secure_acls = default("/configurations/kafka-broker/zookeeper.set.acl", False)
+kafka_security_migrator = os.path.join(kafka_home, "bin", "zookeeper-security-migration.sh")
 
 #Kafka log4j
 kafka_log_maxfilesize = default('/configurations/kafka-log4j/kafka_log_maxfilesize',256)
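
Editor's note: both scripts delegate the actual ACL rewrite — the Kafka broker runs zookeeper-security-migration.sh with --zookeeper.acl=unsecure, and Atlas points Ambari's ZkMigrator at the consumer-group znode. For orientation only, this is roughly what resetting one znode back to world:anyone looks like with the plain ZooKeeper Java client; the connection string and path are placeholders, and this is not the code path either script uses:

import java.util.List;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.ACL;

public class OpenAclSketch {
  public static void main(String[] args) throws Exception {
    ZooKeeper zk = new ZooKeeper("zk-host:2181", 30000, new Watcher() {
      @Override
      public void process(WatchedEvent event) { /* no-op for this sketch */ }
    });
    try {
      // world:anyone with all permissions, i.e. the "unsecure" state that
      // dekerberization restores on the secured znodes.
      List<ACL> openAcl = ZooDefs.Ids.OPEN_ACL_UNSAFE;
      zk.setACL("/consumers/atlas-group", openAcl, -1); // -1 accepts any ACL version
    } finally {
      zk.close();
    }
  }
}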


[21/50] ambari git commit: AMBARI-19991. cluster deployment fails if cluster template has fewer host_groups than that in blueprint (Amruta Borkar via magyari_sandor)

Posted by nc...@apache.org.
AMBARI-19991. cluster deployment fails if cluster template has fewer host_groups than that in blueprint (Amruta Borkar via magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/45b423b7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/45b423b7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/45b423b7

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 45b423b7be199115f36cad828fd77a45fed21b63
Parents: b789b7c
Author: Amruta Borkar <ar...@us.ibm.com>
Authored: Wed Feb 22 11:43:09 2017 +0100
Committer: Sandor Magyari <sm...@hortonworks.com>
Committed: Wed Feb 22 13:03:24 2017 +0100

----------------------------------------------------------------------
 .../java/org/apache/ambari/server/topology/TopologyManager.java   | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/45b423b7/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
index c4c3dd2..a26624e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
@@ -1050,6 +1050,9 @@ public class TopologyManager {
           if (groupInfo != null) {
             LOG.info("TopologyManager.ConfigureClusterTask areHostGroupsResolved: host group name = {} requires {} hosts to be mapped, but only {} are available.",
                 groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
+          } else {
+              LOG.error("TopologyManager.ConfigureClusterTask areHostGroupsResolved: host group name = {} is required group and does not map to any hosts. Use add host API to add host to this host group.",
+                  hostGroup);
           }
           break;
         } else {


[15/50] ambari git commit: AMBARI-20088 Log Search should handle turned off Credential Store too (mgergely)

Posted by nc...@apache.org.
AMBARI-20088 Log Search should handle turned off Credential Store too (mgergely)

Change-Id: Ib19258e0a2ac7c90118319b3dfe638009b7083a6


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6baf3875
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6baf3875
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6baf3875

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 6baf387596906005391746bd41b60283b9aed980
Parents: df22765
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Feb 22 10:22:03 2017 +0100
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Wed Feb 22 10:22:03 2017 +0100

----------------------------------------------------------------------
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |  9 ++++
 .../0.5.0/package/scripts/setup_logfeeder.py    | 43 ++++++++++++++++----
 .../0.5.0/package/scripts/setup_logsearch.py    | 35 ++++++++++++----
 .../test/python/stacks/2.4/configs/default.json |  1 +
 4 files changed, 74 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6baf3875/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index a023f2f..17c536e 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -53,9 +53,14 @@ tmp_dir = Script.get_tmp_dir()
 sudo = AMBARI_SUDO_BINARY
 security_enabled = status_params.security_enabled
 
+credential_store_enabled = False
+if 'credentialStoreEnabled' in config:
+  credential_store_enabled = config['credentialStoreEnabled']
+
 logsearch_server_conf = "/etc/ambari-logsearch-portal/conf"
 logsearch_server_keys_folder = logsearch_server_conf + "/keys"
 logsearch_logfeeder_conf = "/etc/ambari-logsearch-logfeeder/conf"
+logsearch_logfeeder_keys_folder = logsearch_logfeeder_conf + "/keys"
 
 logsearch_config_set_dir = format("{logsearch_server_conf}/solr_configsets")
 
@@ -176,8 +181,10 @@ logsearch_app_max_memory = config['configurations']['logsearch-env']['logsearch_
 
 logsearch_keystore_location = config['configurations']['logsearch-env']['logsearch_keystore_location']
 logsearch_keystore_type = config['configurations']['logsearch-env']['logsearch_keystore_type']
+logsearch_keystore_password = config['configurations']['logsearch-env']['logsearch_keystore_password']
 logsearch_truststore_location = config['configurations']['logsearch-env']['logsearch_truststore_location']
 logsearch_truststore_type = config['configurations']['logsearch-env']['logsearch_truststore_type']
+logsearch_truststore_password = config['configurations']['logsearch-env']['logsearch_truststore_password']
 
 logsearch_env_config = dict(config['configurations']['logsearch-env'])
 logsearch_env_jceks_file = os.path.join(logsearch_server_conf, 'logsearch.jceks')
@@ -312,8 +319,10 @@ logfeeder_log4j_content = config['configurations']['logfeeder-log4j']['content']
 
 logfeeder_keystore_location = config['configurations']['logfeeder-env']['logfeeder_keystore_location']
 logfeeder_keystore_type = config['configurations']['logfeeder-env']['logfeeder_keystore_type']
+logfeeder_keystore_password = config['configurations']['logfeeder-env']['logfeeder_keystore_password']
 logfeeder_truststore_location = config['configurations']['logfeeder-env']['logfeeder_truststore_location']
 logfeeder_truststore_type = config['configurations']['logfeeder-env']['logfeeder_truststore_type']
+logfeeder_truststore_password = config['configurations']['logfeeder-env']['logfeeder_truststore_password']
 
 logfeeder_env_config = dict(config['configurations']['logfeeder-env'])
 logfeeder_env_jceks_file = os.path.join(logsearch_logfeeder_conf, 'logfeeder.jceks')

http://git-wip-us.apache.org/repos/asf/ambari/blob/6baf3875/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
index 6952c2c..e6e55b9 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
@@ -45,13 +45,42 @@ def setup_logfeeder():
        content=''
        )
 
-  params.logfeeder_env_config = update_credential_provider_path(params.logfeeder_env_config,
-                                                                'logfeeder-env',
-                                                                params.logfeeder_env_jceks_file,
-                                                                params.logsearch_user,
-                                                                params.user_group
-                                                                )
-  params.logfeeder_properties[HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME] = 'jceks://file' + params.logfeeder_env_jceks_file
+  if params.credential_store_enabled:
+    params.logfeeder_env_config = update_credential_provider_path(params.logfeeder_env_config,
+                                                                 'logfeeder-env',
+                                                                 params.logfeeder_env_jceks_file,
+                                                                 params.logsearch_user,
+                                                                 params.user_group
+                                                                 )
+    params.logfeeder_properties[HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME] = 'jceks://file' + params.logfeeder_env_jceks_file
+    File(format("{logsearch_logfeeder_keys_folder}/ks_pass.txt"),
+         action="delete"
+         )
+    File(format("{logsearch_logfeeder_keys_folder}/ts_pass.txt"),
+         action="delete"
+         )
+  else:
+    Directory(params.logsearch_logfeeder_keys_folder,
+              cd_access='a',
+              mode=0755,
+              owner=params.logsearch_user,
+              group=params.user_group
+              )
+   
+    File(format("{logsearch_logfeeder_keys_folder}/ks_pass.txt"),
+         content=params.logfeeder_keystore_password,
+         mode=0600,
+         owner=params.logsearch_user,
+         group=params.user_group
+         )
+
+    File(format("{logsearch_logfeeder_keys_folder}/ts_pass.txt"),
+         content=params.logfeeder_truststore_password,
+         mode=0600,
+         owner=params.logsearch_user,
+         group=params.user_group
+         )
+  
   PropertiesFile(format("{logsearch_logfeeder_conf}/logfeeder.properties"),
                  properties = params.logfeeder_properties
                  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/6baf3875/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
index f96bfd0..7738cc1 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
@@ -60,13 +60,34 @@ def setup_logsearch():
        content=''
        )
 
-  params.logsearch_env_config = update_credential_provider_path(params.logsearch_env_config,
-                                                                'logsearch-env',
-                                                                params.logsearch_env_jceks_file,
-                                                                params.logsearch_user,
-                                                                params.user_group
-                                                                )
-  params.logsearch_properties[HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME] = 'jceks://file' + params.logsearch_env_jceks_file
+  if params.credential_store_enabled:
+    params.logsearch_env_config = update_credential_provider_path(params.logsearch_env_config,
+                                                                 'logsearch-env',
+                                                                 params.logsearch_env_jceks_file,
+                                                                 params.logsearch_user,
+                                                                 params.user_group
+                                                                 )
+    params.logsearch_properties[HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME] = 'jceks://file' + params.logsearch_env_jceks_file
+    File(format("{logsearch_server_keys_folder}/ks_pass.txt"),
+         action="delete"
+         )
+    File(format("{logsearch_server_keys_folder}/ts_pass.txt"),
+         action="delete"
+         )
+  else:
+    File(format("{logsearch_server_keys_folder}/ks_pass.txt"),
+         content=params.logsearch_keystore_password,
+         mode=0600,
+         owner= params.logsearch_user,
+         group=params.user_group
+         )
+    File(format("{logsearch_server_keys_folder}/ts_pass.txt"),
+         content=params.logsearch_truststore_password,
+         mode=0600,
+         owner= params.logsearch_user,
+         group=params.user_group
+         )
+  
   PropertiesFile(format("{logsearch_server_conf}/logsearch.properties"),
                  properties=params.logsearch_properties
                  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/6baf3875/ambari-server/src/test/python/stacks/2.4/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/configs/default.json b/ambari-server/src/test/python/stacks/2.4/configs/default.json
index d4e6064..8822e96 100644
--- a/ambari-server/src/test/python/stacks/2.4/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.4/configs/default.json
@@ -18,6 +18,7 @@
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari"
     },
+    "credentialStoreEnabled": "true",
     "commandType": "EXECUTION_COMMAND",
     "roleParams": {},
     "serviceName": "SLIDER",


[19/50] ambari git commit: AMBARI-20096. Log Search: only update schema file for Solr if it has any new fields in the local file (oleewere)

Posted by nc...@apache.org.
AMBARI-20096. Log Search: only update schema file for Solr if it has any new fields in the local file (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/309dbd75
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/309dbd75
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/309dbd75

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 309dbd7581bb7826f80a21e046721ee30acc27fb
Parents: 813841f
Author: oleewere <ol...@gmail.com>
Authored: Mon Feb 20 15:17:59 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Wed Feb 22 12:45:56 2017 +0100

----------------------------------------------------------------------
 .../handler/UploadConfigurationHandler.java     | 65 +++++++++++++++++---
 1 file changed, 58 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/309dbd75/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java
index 23defea..27a6705 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.logsearch.handler;
 
 import org.apache.ambari.logsearch.conf.SolrPropsConfig;
+import org.apache.commons.configuration.XMLConfiguration;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -29,6 +30,8 @@ import java.io.File;
 import java.io.IOException;
 import java.nio.file.FileSystems;
 import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.UUID;
 
 public class UploadConfigurationHandler implements SolrZkRequestHandler<Boolean> {
@@ -37,6 +40,9 @@ public class UploadConfigurationHandler implements SolrZkRequestHandler<Boolean>
 
   private static final String SCHEMA_FILE = "managed-schema";
   private static final String SOLR_CONFIG_FILE = "solrconfig.xml";
+  private static final String FIELD_NAME_PATH = "field[@name]";
+  private static final String FIELD_TYPE_NAME_PATH = "fieldType[@name]";
+  private static final String DYNAMIC_FIELD_NAME_PATH = "dynamicField[@name]";
 
   private File configSetFolder;
 
@@ -65,13 +71,17 @@ public class UploadConfigurationHandler implements SolrZkRequestHandler<Boolean>
         File[] listOfFiles = configSetFolder.listFiles();
         if (listOfFiles != null) {
           for (File file : listOfFiles) {
-            if (file.getName().equals(SOLR_CONFIG_FILE) || file.getName().equals(SCHEMA_FILE)) { // TODO: try to find an another solution to reload schema
-              if (!FileUtils.contentEquals(file, new File(String.format("%s%s%s", downloadFolderLocation, separator, file.getName())))){
-                LOG.info("One of the local solr config file differs ('{}'), upload config set to zookeeper", file.getName());
-                zkConfigManager.uploadConfigDir(configSetFolder.toPath(), solrPropsConfig.getConfigName());
-                reloadCollectionNeeded = true;
-                break;
-              }
+            if (file.getName().equals(SOLR_CONFIG_FILE) && !FileUtils.contentEquals(file, new File(String.format("%s%s%s", downloadFolderLocation, separator, file.getName())))) {
+              LOG.info("Solr config file differs ('{}'), upload config set to zookeeper", file.getName());
+              zkConfigManager.uploadConfigDir(configSetFolder.toPath(), solrPropsConfig.getConfigName());
+              reloadCollectionNeeded = true;
+              break;
+            }
+            if (file.getName().equals(SCHEMA_FILE) && localSchemaFileHasMoreFields(file, new File(String.format("%s%s%s", downloadFolderLocation, separator, file.getName())))) {
+              LOG.info("Solr schema file differs ('{}'), upload config set to zookeeper", file.getName());
+              zkConfigManager.uploadConfigDir(configSetFolder.toPath(), solrPropsConfig.getConfigName());
+              reloadCollectionNeeded = true;
+              break;
             }
           }
         }
@@ -97,4 +107,45 @@ public class UploadConfigurationHandler implements SolrZkRequestHandler<Boolean>
     return reloadCollectionNeeded;
   }
 
+  private boolean localSchemaFileHasMoreFields(File localFile, File downloadedFile) {
+    try {
+      XMLConfiguration localFileXml = new XMLConfiguration(localFile);
+      XMLConfiguration downloadedFileXml = new XMLConfiguration(downloadedFile);
+
+      List<String> localFieldNames = (ArrayList<String>) localFileXml.getProperty(FIELD_NAME_PATH);
+      List<String> localFieldTypes = (ArrayList<String>) localFileXml.getProperty(FIELD_TYPE_NAME_PATH);
+      List<String> localDynamicFields = (ArrayList<String>) localFileXml.getProperty(DYNAMIC_FIELD_NAME_PATH);
+
+      List<String> fieldNames = (ArrayList<String>) downloadedFileXml.getProperty(FIELD_NAME_PATH);
+      List<String> fieldTypes = (ArrayList<String>) downloadedFileXml.getProperty(FIELD_TYPE_NAME_PATH);
+      List<String> dynamicFields = (ArrayList<String>) downloadedFileXml.getProperty(DYNAMIC_FIELD_NAME_PATH);
+
+      boolean fieldNameHasDiff = hasMoreFields(localFieldNames, fieldNames, FIELD_NAME_PATH);
+      boolean fieldTypeHasDiff = hasMoreFields(localFieldTypes, fieldTypes, FIELD_TYPE_NAME_PATH);
+      boolean dynamicFieldNameHasDiff = hasMoreFields(localDynamicFields, dynamicFields, DYNAMIC_FIELD_NAME_PATH);
+
+      return fieldNameHasDiff || fieldTypeHasDiff || dynamicFieldNameHasDiff;
+    } catch (Exception e) {
+      throw new RuntimeException("Exception during schema xml parsing.", e);
+    }
+  }
+
+  private boolean hasMoreFields(List<String> localFields, List<String> fields, String tag) {
+    boolean result = false;
+    if (localFields != null) {
+      if (fields == null) {
+        result = true;
+      } else {
+        localFields.removeAll(fields);
+        if (!localFields.isEmpty()) {
+          result = true;
+        }
+      }
+    }
+    if (result) {
+      LOG.info("Found new fields or field types in local schema file.: {} ({})", localFields.toString(), tag);
+    }
+    return result;
+  }
+
 }
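
Editor's note: the new check is a one-way set difference — the config set is re-uploaded (and the collection reloaded) only when the local managed-schema declares field, field-type, or dynamic-field names that the copy downloaded from ZooKeeper lacks; extra fields on the ZooKeeper side no longer trigger an upload. A compact sketch of that comparison with plain collections (the handler itself first pulls the name lists out of the XML via commons-configuration):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SchemaFieldDiff {

  /** True if 'local' has any names missing from 'downloaded'; a null list means "no names". */
  static boolean localHasNewNames(List<String> local, List<String> downloaded) {
    if (local == null || local.isEmpty()) {
      return false;
    }
    Set<String> remaining = new HashSet<>(local);
    if (downloaded != null) {
      remaining.removeAll(downloaded);
    }
    return !remaining.isEmpty();
  }

  public static void main(String[] args) {
    List<String> localFields = Arrays.asList("log_message", "level", "new_custom_field");
    List<String> zkFields = Arrays.asList("log_message", "level");
    // Only the local side having extra names forces an upload + collection reload.
    System.out.println(localHasNewNames(localFields, zkFields)); // true
    System.out.println(localHasNewNames(zkFields, localFields)); // false
  }
}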


[42/50] ambari git commit: AMBARI-20120 - Error during EU while updating Ranger Log4J service configs (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-20120 - Error during EU while updating Ranger Log4J service configs (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5efa6531
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5efa6531
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5efa6531

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 5efa6531d226fe80f717a8997b7a2970cda1ef7c
Parents: 890ad90
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Feb 22 16:06:46 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Feb 23 08:48:46 2017 -0500

----------------------------------------------------------------------
 .../internal/UpgradeResourceProvider.java       | 21 ++++-
 .../ambari/server/state/ConfigHelper.java       | 80 +++++++++++++-------
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  | 28 -------
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml | 17 -----
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml | 17 -----
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml | 18 -----
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     | 15 ----
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     | 14 ----
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     | 11 ---
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  | 28 -------
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml | 16 ----
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml | 16 ----
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     | 14 ----
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     | 11 ---
 14 files changed, 71 insertions(+), 235 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 13a6c36..ca9ce07 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -1048,18 +1048,27 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
       Set<String> upgradePackServices = new HashSet<>();
       Set<String> upgradePackConfigTypes = new HashSet<>();
       AmbariMetaInfo ambariMetaInfo = s_metaProvider.get();
-      Map<String, ServiceInfo> stackServicesMap = ambariMetaInfo.getServices(targetStack.getStackName(), targetStack.getStackVersion());
+
+      // ensure that we get the service info from the target stack
+      // (since it could include new configuration types for a service)
+      Map<String, ServiceInfo> stackServicesMap = ambariMetaInfo.getServices(
+          targetStack.getStackName(), targetStack.getStackVersion());
+
       for (Grouping group : upgradePack.getGroups(direction)) {
         for (UpgradePack.OrderService service : group.services) {
           if (service.serviceName == null || upgradePackServices.contains(service.serviceName)) {
             // No need to re-process service that has already been looked at
             continue;
           }
+
           upgradePackServices.add(service.serviceName);
           ServiceInfo serviceInfo = stackServicesMap.get(service.serviceName);
           if (serviceInfo == null) {
             continue;
           }
+
+          // add every configuration type for all services defined in the
+          // upgrade pack
           Set<String> serviceConfigTypes = serviceInfo.getConfigTypeAttributes().keySet();
           for (String serviceConfigType : serviceConfigTypes) {
             if (!upgradePackConfigTypes.contains(serviceConfigType)) {
@@ -1068,6 +1077,9 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
           }
         }
       }
+
+      // build a set of configurations that should not be merged since their
+      // services are not installed
       Set<String> servicesNotInUpgradePack = new HashSet<>(stackServicesMap.keySet());
       servicesNotInUpgradePack.removeAll(upgradePackServices);
       for (String serviceNotInUpgradePack : servicesNotInUpgradePack) {
@@ -1079,7 +1091,9 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
           }
         }
       }
-      // Remove unused config-types from 'newConfigurationsByType'
+
+      // remove any configurations from the target stack that are not used
+      // because the services are not installed
       Iterator<String> iterator = newConfigurationsByType.keySet().iterator();
       while (iterator.hasNext()) {
         String configType = iterator.next();
@@ -1118,10 +1132,11 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         // get the existing configurations
         Map<String, String> existingConfigurations = currentClusterConfig.getProperties();
 
-        // if the new stack configurations don't have the type, then simple add
+        // if the new stack configurations don't have the type, then simply add
         // all of the existing in
         Map<String, String> newDefaultConfigurations = newConfigurationsByType.get(
             configurationType);
+
         if (null == newDefaultConfigurations) {
           newConfigurationsByType.put(configurationType, existingConfigurations);
           continue;
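
Editor's note: in short, the merge logic reads the service definitions from the target stack, collects the config types owned by services named in the upgrade pack, and drops every proposed config type that belongs only to services outside the pack before merging. A small sketch of that set arithmetic with plain maps (service and config-type names are made up for illustration):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class ConfigTypePruning {

  public static void main(String[] args) {
    // target-stack view: service -> config types it owns (names are illustrative)
    Map<String, Set<String>> stackServices = new HashMap<>();
    stackServices.put("HDFS", new HashSet<>(Arrays.asList("hdfs-site", "core-site")));
    stackServices.put("RANGER", new HashSet<>(Arrays.asList("ranger-admin-log4j")));

    // services actually referenced by the upgrade pack
    Set<String> upgradePackServices = new HashSet<>(Arrays.asList("HDFS"));

    // configurations proposed by the target stack, keyed by config type
    Map<String, Map<String, String>> newConfigurationsByType = new HashMap<>();
    newConfigurationsByType.put("hdfs-site", new HashMap<>());
    newConfigurationsByType.put("ranger-admin-log4j", new HashMap<>());

    // config types owned by services that are in the upgrade pack
    Set<String> upgradePackConfigTypes = new HashSet<>();
    for (String service : upgradePackServices) {
      upgradePackConfigTypes.addAll(stackServices.getOrDefault(service, new HashSet<>()));
    }

    // config types owned only by services outside the pack are not merged
    Set<String> typesToDrop = new HashSet<>();
    for (Map.Entry<String, Set<String>> entry : stackServices.entrySet()) {
      if (upgradePackServices.contains(entry.getKey())) {
        continue;
      }
      for (String type : entry.getValue()) {
        if (!upgradePackConfigTypes.contains(type)) {
          typesToDrop.add(type);
        }
      }
    }

    newConfigurationsByType.keySet().removeAll(typesToDrop);
    System.out.println(newConfigurationsByType.keySet()); // [hdfs-site]
  }
}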

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index 6572bbb..d03a4dc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -180,7 +180,7 @@ public class ConfigHelper {
       clusterDesired = new HashMap<>();
     }
 
-    Map<String, Map<String, String>> resolved = new TreeMap<String, Map<String, String>>();
+    Map<String, Map<String, String>> resolved = new TreeMap<>();
 
     // Do not use host component config mappings.  Instead, the rules are:
     // 1) Use the cluster desired config
@@ -197,7 +197,7 @@ public class ConfigHelper {
           continue;
         }
 
-        Map<String, String> tags = new LinkedHashMap<String, String>();
+        Map<String, String> tags = new LinkedHashMap<>();
 
         tags.put(CLUSTER_DEFAULT_TAG, config.getTag());
 
@@ -232,17 +232,17 @@ public class ConfigHelper {
   public Map<String, Map<String, String>> getEffectiveConfigProperties(
       Cluster cluster, Map<String, Map<String, String>> desiredTags) {
 
-    Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
+    Map<String, Map<String, String>> properties = new HashMap<>();
 
     if (desiredTags != null) {
       for (Entry<String, Map<String, String>> entry : desiredTags.entrySet()) {
         String type = entry.getKey();
         Map<String, String> propertyMap = properties.get(type);
         if (propertyMap == null) {
-          propertyMap = new HashMap<String, String>();
+          propertyMap = new HashMap<>();
         }
 
-        Map<String, String> tags = new HashMap<String, String>(entry.getValue());
+        Map<String, String> tags = new HashMap<>(entry.getValue());
         String clusterTag = tags.get(CLUSTER_DEFAULT_TAG);
 
         // Overrides is only supported if the config type exists at cluster
@@ -282,7 +282,7 @@ public class ConfigHelper {
   public Map<String, Map<String, Map<String, String>>> getEffectiveConfigAttributes(
       Cluster cluster, Map<String, Map<String, String>> desiredTags) {
 
-    Map<String, Map<String, Map<String, String>>> attributes = new HashMap<String, Map<String, Map<String, String>>>();
+    Map<String, Map<String, Map<String, String>>> attributes = new HashMap<>();
 
     if (desiredTags != null) {
       for (Entry<String, Map<String, String>> entry : desiredTags.entrySet()) {
@@ -290,13 +290,13 @@ public class ConfigHelper {
         String type = entry.getKey();
         Map<String, Map<String, String>> attributesMap = null;
 
-        Map<String, String> tags = new HashMap<String, String>(entry.getValue());
+        Map<String, String> tags = new HashMap<>(entry.getValue());
         String clusterTag = tags.get(CLUSTER_DEFAULT_TAG);
 
         if (clusterTag != null) {
           Config config = cluster.getConfig(type, clusterTag);
           if (config != null) {
-            attributesMap = new TreeMap<String, Map<String, String>>();
+            attributesMap = new TreeMap<>();
             cloneAttributesMap(config.getPropertiesAttributes(), attributesMap);
           }
           tags.remove(CLUSTER_DEFAULT_TAG);
@@ -326,7 +326,7 @@ public class ConfigHelper {
   public Map<String, String> getMergedConfig(Map<String,
       String> persistedClusterConfig, Map<String, String> override) {
 
-    Map<String, String> finalConfig = new HashMap<String, String>(persistedClusterConfig);
+    Map<String, String> finalConfig = new HashMap<>(persistedClusterConfig);
 
     if (override != null && override.size() > 0) {
       for (Entry<String, String> entry : override.entrySet()) {
@@ -480,7 +480,7 @@ public class ConfigHelper {
     StackInfo stack = ambariMetaInfo.getStack(stackId.getStackName(),
                                               stackId.getStackVersion());
 
-    Set<String> result = new HashSet<String>();
+    Set<String> result = new HashSet<>();
 
     for (Service service : clusters.getCluster(clusterName).getServices().values()) {
       Set<PropertyInfo> stackProperties = ambariMetaInfo.getServiceProperties(stack.getName(), stack.getVersion(), service.getName());
@@ -517,8 +517,9 @@ public class ConfigHelper {
     Set<PropertyInfo> serviceProperties = ambariMetaInfo.getServiceProperties(stack.getName(), stack.getVersion(), service.getName());
     for (PropertyInfo serviceProperty : serviceProperties) {
       if (serviceProperty.getPropertyTypes().contains(propertyType)) {
-        if (!serviceProperty.getPropertyValueAttributes().isKeyStore())
+        if (!serviceProperty.getPropertyValueAttributes().isKeyStore()) {
           continue;
+        }
         String stackPropertyConfigType = fileNameToConfigType(serviceProperty.getFilename());
         passwordProperties = result.get(stackPropertyConfigType);
         if (passwordProperties == null) {
@@ -561,7 +562,7 @@ public class ConfigHelper {
                                                        Map<String, ServiceInfo> servicesMap,
                                                        Set<PropertyInfo> stackProperties) throws AmbariException {
     Map<String, Config> actualConfigs = new HashMap<>();
-    Set<String> result = new HashSet<String>();
+    Set<String> result = new HashSet<>();
 
     for (Map.Entry<String, DesiredConfig> desiredConfigEntry : desiredConfigs.entrySet()) {
       String configType = desiredConfigEntry.getKey();
@@ -570,7 +571,7 @@ public class ConfigHelper {
     }
 
     for (Service service : cluster.getServices().values()) {
-      Set<PropertyInfo> serviceProperties = new HashSet<PropertyInfo>(servicesMap.get(service.getName()).getProperties());
+      Set<PropertyInfo> serviceProperties = new HashSet<>(servicesMap.get(service.getName()).getProperties());
       for (PropertyInfo serviceProperty : serviceProperties) {
         if (serviceProperty.getPropertyTypes().contains(propertyType)) {
           String stackPropertyConfigType = fileNameToConfigType(serviceProperty.getFilename());
@@ -724,7 +725,7 @@ public class ConfigHelper {
   public Set<PropertyInfo> getServiceProperties(StackId stackId, String serviceName, boolean removeExcluded)
       throws AmbariException {
     ServiceInfo service = ambariMetaInfo.getService(stackId.getStackName(), stackId.getStackVersion(), serviceName);
-    Set<PropertyInfo> properties = new HashSet<PropertyInfo>(service.getProperties());
+    Set<PropertyInfo> properties = new HashSet<>(service.getProperties());
 
     if (removeExcluded) {
       Set<String> excludedConfigTypes = service.getExcludedConfigTypes();
@@ -794,9 +795,9 @@ public class ConfigHelper {
 
     Config oldConfig = cluster.getDesiredConfigByType(configType);
     Map<String, String> oldConfigProperties;
-    Map<String, String> properties = new HashMap<String, String>();
+    Map<String, String> properties = new HashMap<>();
     Map<String, Map<String, String>> propertiesAttributes =
-      new HashMap<String, Map<String, String>>();
+      new HashMap<>();
 
     if (oldConfig == null) {
       oldConfigProperties = null;
@@ -890,7 +891,7 @@ public class ConfigHelper {
       Map<String, Map<String, String>> batchProperties, String authenticatedUserName,
       String serviceVersionNote) throws AmbariException {
 
-    Map<String, Set<Config>> serviceMapped = new HashMap<String, Set<Config>>();
+    Map<String, Set<Config>> serviceMapped = new HashMap<>();
 
     for (Map.Entry<String, Map<String, String>> entry : batchProperties.entrySet()) {
       String type = entry.getKey();
@@ -922,8 +923,33 @@ public class ConfigHelper {
 
   }
 
-  Config createConfig(Cluster cluster, AmbariManagementController controller, String type, String tag,
-                      Map<String, String> properties, Map<String, Map<String, String>> propertyAttributes) throws AmbariException {
+  /**
+   * Creates a new configuration using the specified tag as the first version
+   * tag. Otherwise, the configuration will be created with {@literal version}
+   * along with the current timestamp.
+   *
+   * @param cluster
+   *          the cluster (not {@code null}).
+   * @param controller
+   *          the controller which actually creates the configuration (not
+   *          {@code null}).
+   * @param type
+   *          the new configuration type (not {@code null}).
+   * @param tag
+   *          the initial tag; if this configuration already exists, it will use
+   *          the timestamp along with {@literal version}.
+   * @param properties
+   *          the properties to persist (not {@code null}).
+   * @param propertyAttributes
+   *          the attributes to persist, or {@code null} for none.
+   * @return
+   * @throws AmbariException
+   */
+  Config createConfig(Cluster cluster, AmbariManagementController controller, String type,
+      String tag, Map<String, String> properties,
+      Map<String, Map<String, String>> propertyAttributes) throws AmbariException {
+
+    // if the configuration is not new, then create a timestamp tag
     if (cluster.getConfigsByType(type) != null) {
       tag = "version" + System.currentTimeMillis();
     }
@@ -962,7 +988,7 @@ public class ConfigHelper {
    */
   public Map<String, Map<String, String>> getDefaultProperties(StackId stack, Cluster cluster, boolean onStackUpgradeFilter)
       throws AmbariException {
-    Map<String, Map<String, String>> defaultPropertiesByType = new HashMap<String, Map<String, String>>();
+    Map<String, Map<String, String>> defaultPropertiesByType = new HashMap<>();
 
     // populate the stack (non-service related) properties first
     Set<org.apache.ambari.server.state.PropertyInfo> stackConfigurationProperties = ambariMetaInfo.getStackProperties(
@@ -1155,8 +1181,8 @@ public class ConfigHelper {
   private Collection<String> findChangedKeys(Cluster cluster, String type,
                                              Collection<String> desiredTags, Collection<String> actualTags) {
 
-    Map<String, String> desiredValues = new HashMap<String, String>();
-    Map<String, String> actualValues = new HashMap<String, String>();
+    Map<String, String> desiredValues = new HashMap<>();
+    Map<String, String> actualValues = new HashMap<>();
 
     for (String tag : desiredTags) {
       Config config = cluster.getConfig(type, tag);
@@ -1172,7 +1198,7 @@ public class ConfigHelper {
       }
     }
 
-    List<String> keys = new ArrayList<String>();
+    List<String> keys = new ArrayList<>();
 
     for (Entry<String, String> entry : desiredValues.entrySet()) {
       String key = entry.getKey();
@@ -1192,7 +1218,7 @@ public class ConfigHelper {
    * @return the map of tags for a desired config
    */
   private Map<String, String> buildTags(HostConfig hc) {
-    Map<String, String> map = new LinkedHashMap<String, String>();
+    Map<String, String> map = new LinkedHashMap<>();
     map.put(CLUSTER_DEFAULT_TAG, hc.getDefaultVersionTag());
     if (hc.getConfigGroupOverrides() != null) {
       for (Entry<Long, String> entry : hc.getConfigGroupOverrides().entrySet()) {
@@ -1217,8 +1243,8 @@ public class ConfigHelper {
       desiredTags.remove(CLUSTER_DEFAULT_TAG);
     }
 
-    Set<String> desiredSet = new HashSet<String>(desiredTags.values());
-    Set<String> actualSet = new HashSet<String>(actualTags.values());
+    Set<String> desiredSet = new HashSet<>(desiredTags.values());
+    Set<String> actualSet = new HashSet<>(actualTags.values());
 
     // Both desired and actual should be exactly the same
     return !desiredSet.equals(actualSet);
@@ -1228,7 +1254,7 @@ public class ConfigHelper {
    * @return the list of combined config property names
    */
   private Collection<String> mergeKeyNames(Cluster cluster, String type, Collection<String> tags) {
-    Set<String> names = new HashSet<String>();
+    Set<String> names = new HashSet<>();
 
     for (String tag : tags) {
       Config config = cluster.getConfig(type, tag);
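
For illustration only (not code from this commit): a minimal Java sketch of the tag-selection rule described in the new createConfig javadoc above -- if any configuration of the requested type already exists, the supplied tag is replaced by a timestamp-based "version" tag. Class and method names here are illustrative, not Ambari's actual API.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

final class ConfigTagSketch {
  // Mirrors the documented rule: keep the requested tag only for a brand-new config type.
  static String chooseTag(String requestedTag, List<String> existingConfigsOfType) {
    if (existingConfigsOfType != null && !existingConfigsOfType.isEmpty()) {
      return "version" + System.currentTimeMillis(); // e.g. "version1487791234567"
    }
    return requestedTag;
  }

  public static void main(String[] args) {
    System.out.println(chooseTag("INITIAL", Collections.<String>emptyList())); // INITIAL
    System.out.println(chooseTag("INITIAL", Arrays.asList("version1")));       // version<timestamp>
  }
}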

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index 8589e2d..98722c1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -287,27 +287,11 @@
             <type>ranger-env</type>
             <transfer operation="delete" delete-key="bind_anonymous" />
           </definition>
-          <definition xsi:type="configure" id="admin_log4j_parameterize" summary="Parameterizing Ranger Log4J Properties">
-            <type>admin-log4j</type>
-            <set key="ranger_xa_log_maxfilesize" value="256"/>
-            <set key="ranger_xa_log_maxbackupindex" value="20"/>
-            <replace key="content" find="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.xa_log_appender.MaxFileSize={{ranger_xa_log_maxfilesize}}MB"/>
-            <replace key="content" find="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.xa_log_appender.MaxBackupIndex={{ranger_xa_log_maxbackupindex}}"/>
-          </definition>
-
         </changes>
       </component>
 
       <component name="RANGER_USERSYNC">
         <changes>
-          <definition xsi:type="configure" id="usersync_log4j_parameterize" summary="Parameterizing Ranger Usersync Log4J Properties">
-            <type>usersync-log4j</type>
-            <set key="ranger_usersync_log_maxfilesize" value="256"/>
-            <set key="ranger_usersync_log_maxbackupindex" value="20"/>
-            <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.logFile.MaxFileSize = {{ranger_usersync_log_maxfilesize}}MB"/>
-            <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.logFile.MaxBackupIndex = {{ranger_usersync_log_maxbackupindex}}"/>
-          </definition>
-
           <definition xsi:type="configure" id="hdp_2_6_0_0_disable_delta_sync_during_upgrade">
             <type>ranger-ugsync-site</type>
             <set key="ranger.usersync.ldap.deltasync" value="false"
@@ -315,18 +299,6 @@
           </definition>
         </changes>
       </component>
-
-      <component name="RANGER_TAGSYNC">
-        <changes>
-          <definition xsi:type="configure" id="tagsync_log4j_parameterize" summary="Parameterizing Ranger Tagsync Log4J Properties">
-            <type>tagsync-log4j</type>
-            <set key="ranger_tagsync_log_maxfilesize" value="256"/>
-            <set key="ranger_tagsync_log_number_of_backup_files" value="20"/>
-            <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.logFile.MaxFileSize = {{ranger_tagsync_log_maxfilesize}}MB"/>
-            <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.logFile.MaxBackupIndex = {{ranger_tagsync_log_number_of_backup_files}}"/>
-          </definition>
-          </changes>
-      </component>
     </service>
 
     <service name="RANGER_KMS">

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
index 52421d9..b95bae5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
@@ -412,23 +412,6 @@
         </task>
       </execute-stage>
 
-      <!--RANGER-->
-      <execute-stage service="RANGER" component="RANGER_ADMIN" title="Parameterizing Ranger Admin Log4J Properties">
-        <task xsi:type="configure" id="admin_log4j_parameterize">
-          <summary>Updating the Ranger admin Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-      <execute-stage service="RANGER" component="RANGER_USERSYNC" title="Parameterizing Ranger Usersync Log4J Properties">
-        <task xsi:type="configure" id="usersync_log4j_parameterize">
-          <summary>Updating the Ranger usersync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-      <execute-stage service="RANGER" component="RANGER_TAGSYNC" title="Parameterizing Ranger Tagsync Log4J Properties">
-        <task xsi:type="configure" id="tagsync_log4j_parameterize">
-          <summary>Updating the Ranger tagsync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-
       <!--RANGER-KMS-->
       <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Parameterizing Ranger Kms Log4J Properties">
         <task xsi:type="configure" id="kms_log4j_parameterize">

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
index 280000d..be2694e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
@@ -473,12 +473,6 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_set_external_solrCloud_flag"/>
       </execute-stage>
 
-      <execute-stage service="RANGER" component="RANGER_ADMIN" title="Parameterizing Ranger Admin Log4J Properties">
-        <task xsi:type="configure" id="admin_log4j_parameterize">
-          <summary>Updating the Ranger admin Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-
       <execute-stage service="RANGER" component="RANGER_ADMIN" title="Calculating Ranger Properties">
         <condition xsi:type="security" type="kerberos"/>
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKerberosConfigCalculation">
@@ -492,17 +486,6 @@
         </task>
       </execute-stage>
 
-      <execute-stage service="RANGER" component="RANGER_USERSYNC" title="Parameterizing Ranger Usersync Log4J Properties">
-        <task xsi:type="configure" id="usersync_log4j_parameterize">
-          <summary>Updating the Ranger usersync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-      <execute-stage service="RANGER" component="RANGER_TAGSYNC" title="Parameterizing Ranger Tagsync Log4J Properties">
-        <task xsi:type="configure" id="tagsync_log4j_parameterize">
-          <summary>Updating the Ranger tagsync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-
       <!-- RANGER KMS -->
       <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Apply config changes for Ranger KMS Server">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kms_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index d675986..e71aa7b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -535,12 +535,6 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_set_external_solrCloud_flag"/>
       </execute-stage>
 
-      <execute-stage service="RANGER" component="RANGER_ADMIN" title="Parameterizing Ranger Admin Log4J Properties">
-        <task xsi:type="configure" id="admin_log4j_parameterize">
-          <summary>Updating the Ranger admin Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-
       <execute-stage service="RANGER" component="RANGER_ADMIN" title="Calculating Ranger Properties">
         <condition xsi:type="security" type="kerberos"/>
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKerberosConfigCalculation">
@@ -558,22 +552,10 @@
         <task xsi:type="configure" id="hdp_2_6_0_0_remove_bind_anonymous"/>
       </execute-stage>
 
-       <execute-stage service="RANGER" component="RANGER_USERSYNC" title="Parameterizing Ranger Usersync Log4J Properties">
-        <task xsi:type="configure" id="usersync_log4j_parameterize">
-          <summary>Updating the Ranger usersync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-
       <execute-stage service="RANGER" component="RANGER_USERSYNC" title="Apply config changes for Ranger Usersync">
         <task xsi:type="configure" id="hdp_2_6_0_0_disable_delta_sync_during_upgrade"/>
       </execute-stage>
 
-      <execute-stage service="RANGER" component="RANGER_TAGSYNC" title="Parameterizing Ranger Tagsync Log4J Properties">
-        <task xsi:type="configure" id="tagsync_log4j_parameterize">
-          <summary>Updating the Ranger tagsync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-
       <!-- RANGER KMS -->
       <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Apply config changes for Ranger KMS Server">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kms_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index b662b28..91c8cdf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -514,8 +514,6 @@
             <function>setup_ranger_java_patches</function>
           </task>
 
-          <task xsi:type="configure" id="admin_log4j_parameterize" />
-
         </pre-upgrade>
         
         <pre-downgrade copy-upgrade="true" />
@@ -527,24 +525,11 @@
       </component>
 
       <component name="RANGER_USERSYNC">
-        <pre-upgrade>
-          <task xsi:type="configure" id="usersync_log4j_parameterize" />
-        </pre-upgrade>
-        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
 
-      <component name="RANGER_TAGSYNC">
-        <pre-upgrade>
-          <task xsi:type="configure" id="tagsync_log4j_parameterize" />
-        </pre-upgrade>
-        <pre-downgrade />
-        <upgrade>
-          <task xsi:type="restart-task" />
-        </upgrade>
-      </component>
     </service>
 
     <service name="RANGER_KMS">

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
index b53ff23..fd93829 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
@@ -577,7 +577,6 @@
     <service name="RANGER">
       <component name="RANGER_ADMIN">
         <pre-upgrade>
-          <task xsi:type="configure" id="admin_log4j_parameterize" />
           <task xsi:type="execute" hosts="all">
             <summary>Stop Ranger Admin</summary>
             <script>scripts/ranger_admin.py</script>
@@ -626,24 +625,11 @@
       </component>
 
       <component name="RANGER_USERSYNC">
-        <pre-upgrade>
-          <task xsi:type="configure" id="usersync_log4j_parameterize" />
-        </pre-upgrade>
-        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
 
-      <component name="RANGER_TAGSYNC">
-        <pre-upgrade>
-          <task xsi:type="configure" id="tagsync_log4j_parameterize" />
-        </pre-upgrade>
-        <pre-downgrade />
-        <upgrade>
-          <task xsi:type="restart-task" />
-        </upgrade>
-      </component>
     </service>
 
     <service name="RANGER_KMS">

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index 9917ee1..9f3f42d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -617,7 +617,6 @@
             <script>scripts/ranger_admin.py</script>
             <function>setup_ranger_java_patches</function>
           </task>
-          <task xsi:type="configure" id="admin_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
@@ -630,7 +629,6 @@
 
       <component name="RANGER_USERSYNC">
         <pre-upgrade>
-          <task xsi:type="configure" id="usersync_log4j_parameterize" />
           <task xsi:type="configure" id="hdp_2_6_0_0_disable_delta_sync_during_upgrade"/>
         </pre-upgrade>
         <pre-downgrade />
@@ -638,15 +636,6 @@
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-      <component name="RANGER_TAGSYNC">
-        <pre-upgrade>
-          <task xsi:type="configure" id="tagsync_log4j_parameterize" />
-        </pre-upgrade>
-        <pre-downgrade />
-        <upgrade>
-          <task xsi:type="restart-task" />
-        </upgrade>
-      </component>
     </service>
 
     <service name="RANGER_KMS">

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 14feab6..36d18ac 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -177,28 +177,11 @@
             <type>ranger-env</type>
             <transfer operation="delete" delete-key="bind_anonymous" />
           </definition>
-
-          <definition xsi:type="configure" id="admin_log4j_parameterize" summary="Parameterizing Ranger Log4J Properties">
-            <type>admin-log4j</type>
-            <set key="ranger_xa_log_maxfilesize" value="256"/>
-            <set key="ranger_xa_log_maxbackupindex" value="20"/>
-            <replace key="content" find="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.xa_log_appender.MaxFileSize={{ranger_xa_log_maxfilesize}}MB"/>
-            <replace key="content" find="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.xa_log_appender.MaxBackupIndex={{ranger_xa_log_maxbackupindex}}"/>
-          </definition>
-
         </changes>
       </component>
 
       <component name="RANGER_USERSYNC">
         <changes>
-          <definition xsi:type="configure" id="usersync_log4j_parameterize" summary="Parameterizing Ranger Usersync Log4J Properties">
-            <type>usersync-log4j</type>
-            <set key="ranger_usersync_log_maxfilesize" value="256"/>
-            <set key="ranger_usersync_log_maxbackupindex" value="20"/>
-            <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.logFile.MaxFileSize = {{ranger_usersync_log_maxfilesize}}MB"/>
-            <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.logFile.MaxBackupIndex = {{ranger_usersync_log_maxbackupindex}}"/>
-          </definition>
-
           <definition xsi:type="configure" id="hdp_2_6_0_0_disable_delta_sync_during_upgrade">
             <type>ranger-ugsync-site</type>
             <set key="ranger.usersync.ldap.deltasync" value="false"
@@ -206,17 +189,6 @@
           </definition>
         </changes>
       </component>
-      <component name="RANGER_TAGSYNC">
-        <changes>
-          <definition xsi:type="configure" id="tagsync_log4j_parameterize" summary="Parameterizing Ranger Tagsync Log4J Properties">
-            <type>tagsync-log4j</type>
-            <set key="ranger_tagsync_log_maxfilesize" value="256"/>
-            <set key="ranger_tagsync_log_number_of_backup_files" value="20"/>
-            <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.logFile.MaxFileSize = {{ranger_tagsync_log_maxfilesize}}MB"/>
-            <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.logFile.MaxBackupIndex = {{ranger_tagsync_log_number_of_backup_files}}"/>
-          </definition>
-        </changes>
-      </component>
     </service>
 
     <service name="RANGER_KMS">

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
index f093cb1..a6c595a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
@@ -358,22 +358,6 @@
       </execute-stage>
 
       <!--RANGER-->
-      <execute-stage service="RANGER" component="RANGER_ADMIN" title="Parameterizing Ranger Admin Log4J Properties">
-        <task xsi:type="configure" id="admin_log4j_parameterize">
-          <summary>Updating the Ranger admin Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-      <execute-stage service="RANGER" component="RANGER_USERSYNC" title="Parameterizing Ranger Usersync Log4J Properties">
-        <task xsi:type="configure" id="usersync_log4j_parameterize">
-          <summary>Updating the Ranger usersync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-      <execute-stage service="RANGER" component="RANGER_TAGSYNC" title="Parameterizing Ranger Tagsync Log4J Properties">
-        <task xsi:type="configure" id="tagsync_log4j_parameterize">
-          <summary>Updating the Ranger tagsync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-
       <execute-stage service="RANGER" component="RANGER_ADMIN" title="Configuring Ranger Alerts">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerWebAlertConfigAction">
           <summary>Configuring Ranger Alerts</summary>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index e856288..e6aaf88 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -381,22 +381,6 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_set_external_solrCloud_flag"/>
       </execute-stage>
 
-      <execute-stage service="RANGER" component="RANGER_ADMIN" title="Parameterizing Ranger Admin Log4J Properties">
-        <task xsi:type="configure" id="admin_log4j_parameterize">
-          <summary>Updating the Ranger admin Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-      <execute-stage service="RANGER" component="RANGER_USERSYNC" title="Parameterizing Ranger Usersync Log4J Properties">
-        <task xsi:type="configure" id="usersync_log4j_parameterize">
-          <summary>Updating the Ranger usersync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-      <execute-stage service="RANGER" component="RANGER_TAGSYNC" title="Parameterizing Ranger Tagsync Log4J Properties">
-        <task xsi:type="configure" id="tagsync_log4j_parameterize">
-          <summary>Updating the Ranger tagsync Log4J properties to include parameterizations</summary>
-        </task>
-      </execute-stage>
-
       <execute-stage service="RANGER" component="RANGER_ADMIN" title="Calculating Ranger Properties">
         <condition xsi:type="security" type="kerberos"/>
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKerberosConfigCalculation">

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
index 2a3e6b2..b1325b9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
@@ -562,7 +562,6 @@
     <service name="RANGER">
       <component name="RANGER_ADMIN">
         <pre-upgrade>
-          <task xsi:type="configure" id="admin_log4j_parameterize" />
           <task xsi:type="execute" hosts="all">
             <summary>Stop Ranger Admin</summary>
             <script>scripts/ranger_admin.py</script>
@@ -611,19 +610,6 @@
       </component>
 
       <component name="RANGER_USERSYNC">
-        <pre-upgrade>
-          <task xsi:type="configure" id="usersync_log4j_parameterize" />
-        </pre-upgrade>
-        <pre-downgrade/>
-        <upgrade>
-          <task xsi:type="restart-task" />
-        </upgrade>
-      </component>
-      <component name="RANGER_TAGSYNC">
-        <pre-upgrade>
-          <task xsi:type="configure" id="tagsync_log4j_parameterize" />
-        </pre-upgrade>
-        <pre-downgrade />
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5efa6531/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index f736796..730e226 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -579,7 +579,6 @@
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_audit_db_ranger_admin_site" />
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_sso_property" />
           <task xsi:type="configure" id="hdp_2_5_0_0_set_external_solrCloud_flag"/>
-          <task xsi:type="configure" id="admin_log4j_parameterize" />
 
           <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKerberosConfigCalculation">
             <summary>Calculating Ranger Properties</summary>
@@ -619,7 +618,6 @@
 
       <component name="RANGER_USERSYNC">
         <pre-upgrade>
-          <task xsi:type="configure" id="usersync_log4j_parameterize" />
           <task xsi:type="configure" id="hdp_2_6_0_0_disable_delta_sync_during_upgrade"/>
         </pre-upgrade>
         <pre-downgrade />
@@ -627,15 +625,6 @@
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-      <component name="RANGER_TAGSYNC">
-        <pre-upgrade>
-          <task xsi:type="configure" id="tagsync_log4j_parameterize" />
-        </pre-upgrade>
-        <pre-downgrade />
-        <upgrade>
-          <task xsi:type="restart-task" />
-        </upgrade>
-      </component>
     </service>
 
     <service name="RANGER_KMS">


[26/50] ambari git commit: AMBARI-20106. Additional config changes for HSI - HDP stack (Siddharth Seth via smohanty)

Posted by nc...@apache.org.
AMBARI-20106. Additional config changes for HSI - HDP stack (Siddharth Seth via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f7acf8ed
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f7acf8ed
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f7acf8ed

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: f7acf8edd4566ddf9f34419a6b88cf06d9136668
Parents: 0b6d1cd
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Wed Feb 22 07:01:22 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Wed Feb 22 15:53:54 2017 -0800

----------------------------------------------------------------------
 .../2.5/services/HIVE/configuration/hive-interactive-env.xml  | 2 +-
 .../2.5/services/HIVE/configuration/hive-interactive-site.xml | 6 ------
 .../2.6/services/HIVE/configuration/hive-interactive-env.xml  | 2 +-
 .../2.6/services/HIVE/configuration/hive-interactive-site.xml | 7 +++++++
 .../2.6/services/HIVE/configuration/tez-interactive-site.xml  | 6 +++---
 5 files changed, 12 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f7acf8ed/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
index 8f47182..c99e4fa 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
@@ -201,7 +201,7 @@
   </property>
   <property>
     <name>llap_java_opts</name>
-    <value>-XX:+AlwaysPreTouch {% if java_version > 7 %}-XX:+UseG1GC -XX:TLABSize=8m -XX:+ResizeTLAB -XX:+UseNUMA -XX:+AggressiveOpts -XX:MetaspaceSize=1024m -XX:InitiatingHeapOccupancyPercent=80 -XX:MaxGCPauseMillis=200{% else %}-XX:+PrintGCDetails -verbose:gc -XX:+PrintGCTimeStamps -XX:+UseNUMA -XX:+UseParallelGC{% endif %}</value>
+    <value>-XX:+AlwaysPreTouch {% if java_version > 7 %}-XX:+UseG1GC -XX:TLABSize=8m -XX:+ResizeTLAB -XX:+UseNUMA -XX:+AggressiveOpts -XX:MetaspaceSize=1024m -XX:InitiatingHeapOccupancyPercent=40 -XX:G1ReservePercent=20 -XX:MaxGCPauseMillis=200{% else %}-XX:+PrintGCDetails -verbose:gc -XX:+PrintGCTimeStamps -XX:+UseNUMA -XX:+UseParallelGC{% endif %}</value>
     <description>Java opts for llap daemons</description>
     <display-name>LLAP daemon java opts</display-name>
     <on-ambari-upgrade add="true"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7acf8ed/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-site.xml
index 93e2020..8008bef 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-site.xml
@@ -389,12 +389,6 @@ limitations under the License.
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>hive.llap.daemon.allow.permanent.fns</name>
-    <value>false</value>
-    <description>Whether LLAP daemon should localize the resources for permanent UDFs.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
     <name>hive.llap.io.memory.size</name>
     <display-name>In-Memory Cache per Daemon</display-name>
     <description>The amount of memory reserved for Hive's optimized in-memory cache.</description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7acf8ed/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml
index b659205..b54f622 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-env.xml
@@ -44,7 +44,7 @@
   </property>
   <property>
     <name>llap_java_opts</name>
-    <value>-XX:+AlwaysPreTouch {% if java_version > 7 %}-XX:+UseG1GC -XX:TLABSize=8m -XX:+ResizeTLAB -XX:+UseNUMA -XX:+AggressiveOpts -XX:InitiatingHeapOccupancyPercent=80 -XX:MaxGCPauseMillis=200{% else %}-XX:+PrintGCDetails -verbose:gc -XX:+PrintGCTimeStamps -XX:+UseNUMA -XX:+UseParallelGC{% endif %}{{heap_dump_opts}}</value>
+    <value>-XX:+AlwaysPreTouch {% if java_version > 7 %}-XX:+UseG1GC -XX:TLABSize=8m -XX:+ResizeTLAB -XX:+UseNUMA -XX:+AggressiveOpts -XX:InitiatingHeapOccupancyPercent=40 -XX:G1ReservePercent=20 -XX:MaxGCPauseMillis=200{% else %}-XX:+PrintGCDetails -verbose:gc -XX:+PrintGCTimeStamps -XX:+UseNUMA -XX:+UseParallelGC{% endif %}{{heap_dump_opts}}</value>
     <description>Java opts for llap application</description>
     <display-name>LLAP app java opts</display-name>
     <on-ambari-upgrade add="false"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7acf8ed/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-site.xml
index ceea350..0610833 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-site.xml
@@ -96,4 +96,11 @@ limitations under the License.
     <on-ambari-upgrade add="true"/>
   </property>
 
+  <property>
+    <name>hive.llap.daemon.am.liveness.heartbeat.interval.ms</name>
+    <value>10000ms</value>
+    <description>Tez AM-LLAP heartbeat interval. This should be below the task timeout</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7acf8ed/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
index f7de121..3ce5fed 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
@@ -29,7 +29,7 @@
   -->
   <property>
     <name>tez.task.heartbeat.timeout.check-ms</name>
-    <value>11000</value>
+    <value>15000</value>
     <description>The time interval, in milliseconds, at which the AM will check
       for timed out tasks
     </description>
@@ -37,7 +37,7 @@
   </property>
   <property>
     <name>tez.task.timeout-ms</name>
-    <value>20000</value>
+    <value>90000</value>
     <description>Amount of time the Tez AM waits before marking a task which
       has not sent in a heartbeat, as timed out
     </description>
@@ -45,7 +45,7 @@
   </property>
   <property>
     <name>tez.am.client.heartbeat.timeout.secs</name>
-    <value>60</value>
+    <value>90</value>
     <description>The time interval, after which an AM will kill itself, if it
       does not receive a heartbeat from the client.
     </description>


[32/50] ambari git commit: AMBARI-20134. Improper action node name after importing the asset (Venkata Sairam via pallavkul)

Posted by nc...@apache.org.
AMBARI-20134. Improper action node name after importing the asset (Venkata Sairam via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/82230d60
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/82230d60
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/82230d60

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 82230d60a8d417809e967e55983c37a8553258a0
Parents: 03d0587
Author: pallavkul <pa...@gmail.com>
Authored: Thu Feb 23 12:28:59 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Thu Feb 23 12:31:10 2017 +0530

----------------------------------------------------------------------
 .../src/main/resources/ui/app/components/flow-designer.js       | 5 +++--
 .../wfmanager/src/main/resources/ui/app/domain/workflow.js      | 4 +++-
 2 files changed, 6 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/82230d60/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index b2b9c68..f97add8 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -449,7 +449,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     this.createSnapshot();
     var transition = this.get("currentTransition").source.transitions.findBy('targetNode.id',currentTransition.targetNode.id);
     transition.source=this.get("currentTransition").source;
-    var actionNode = this.get("workflow").addNode(transition,actionNodeType);
+    this.generateUniqueNodeId(actionNodeType);
+    var actionNode = this.get("workflow").addNode(transition,actionNodeType, {}, "");
     this.rerender();
     this.doValidation();
     this.scrollToNewPosition();
@@ -724,7 +725,7 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     this.set("configForSave", {json : workflowJson, xml : workflowXml,isDraft : isDraft});
     this.set("showingSaveWorkflow",true);
   },
-  openJobConfig (){
+  openJobConfig () {
     this.get('workflowContext').clearErrors();
     var workflowGenerator=WorkflowGenerator.create({workflow:this.get("workflow"),
     workflowContext:this.get('workflowContext')});

http://git-wip-us.apache.org/repos/asf/ambari/blob/82230d60/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js
index 0fc5d6c..be8d8de 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js
@@ -141,7 +141,9 @@ var Workflow= Ember.Object.extend(FindNodeMixin,{
       computedTarget=target.getTargets()[0];
     }
     var generatedNode=this.generatedNode(computedTarget,type,settings);
-    generatedNode.name = generatedNode.name+ "_"+ id;
+    if(id){
+      generatedNode.name = generatedNode.name+ "_"+ id;
+    }
     var sourceNode=transition.source;
     if (sourceNode && sourceNode.isPlaceholder()) {
       var orignalTransition=this.findTransitionTo(this.startNode,sourceNode.id);


[10/50] ambari git commit: AMBARI-20092. Hive View 2.0: User should be shown an appropriate error message when the compute statistics action fails. (dipayanb)

Posted by nc...@apache.org.
AMBARI-20092. Hive View 2.0: User should be shown an appropriate error message when the compute statistics action fails. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4e7bf34a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4e7bf34a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4e7bf34a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 4e7bf34a7435148a8ab96fb9e46059189355edf8
Parents: 3e76e47
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Wed Feb 22 12:42:34 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Wed Feb 22 12:43:08 2017 +0530

----------------------------------------------------------------------
 .../src/main/resources/ui/app/components/table-statistics.js    | 5 ++++-
 .../resources/ui/app/routes/databases/database/tables/table.js  | 2 ++
 .../ui/app/routes/databases/database/tables/table/edit.js       | 2 +-
 3 files changed, 7 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4e7bf34a/contrib/views/hive20/src/main/resources/ui/app/components/table-statistics.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/table-statistics.js b/contrib/views/hive20/src/main/resources/ui/app/components/table-statistics.js
index 1623e0e..0310cbc 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/table-statistics.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/table-statistics.js
@@ -17,8 +17,9 @@
  */
 
 import Ember from 'ember';
+import UILoggerMixin from '../mixins/ui-logger';
 
-export default Ember.Component.extend({
+export default Ember.Component.extend(UILoggerMixin, {
   statsService: Ember.inject.service(),
 
   analyseWithStatistics: false,
@@ -76,6 +77,7 @@ export default Ember.Component.extend({
       Ember.run.later(() => this.closeAndRefresh(), 2 * 1000);
     }).catch((err) => {
       this.set('analyseMessage', 'Job failed for analysing statistics of table');
+      this.get('logger').danger(`Job failed for analysing statistics of table '${tableName}'`, this.extractError(err));
       Ember.run.later(() => this.closeAndRefresh(), 2 * 1000);
     });
   },
@@ -96,6 +98,7 @@ export default Ember.Component.extend({
     }).catch((err) => {
       column.set('isFetchingStats', false);
       column.set('statsError', true);
+      this.get('logger').danger(`Job failed for fetching column statistics for column '${column.name}' of table '${tableName}'`, this.extractError(err));
     });
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4e7bf34a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
index 6ee8100..53055cf 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
@@ -38,6 +38,8 @@ export default Ember.Route.extend(UILoggerMixin, {
     console.log(model.get('detailedInfo.tableType').toLowerCase());
     if (model.get('detailedInfo.tableType').toLowerCase().indexOf('view') === -1) {
       newTabs = newTabs.rejectBy('name', 'viewInfo');
+    } else {
+      newTabs = newTabs.rejectBy('name', 'statistics');
     }
     controller.set('tabs', newTabs);
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/4e7bf34a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js
index d9f80e1..2fa61b4 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/edit.js
@@ -57,7 +57,7 @@ export default TableMetaRouter.extend(UILoggerMixin, {
         this._transitionToTables();
       }).catch((err) => {
         this._modalStatus(false, 'Failed to edit table');
-        this.get('logger').danger(`Failed to  altered table '${settings.table}'`, this.extractError(err));
+        this.get('logger').danger(`Failed to alter table '${settings.table}'`, this.extractError(err));
       });
     }
 


[03/50] ambari git commit: AMBARI-20104 : Kafka Controller metrics are not being displayed. (avijayan)

Posted by nc...@apache.org.
AMBARI-20104 : Kafka Controller metrics are not being displayed. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ef0ae8bf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ef0ae8bf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ef0ae8bf

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: ef0ae8bfdbe515851ddcec3139db0250b1cc62df
Parents: 235f526
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Feb 21 16:06:55 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Feb 21 16:06:55 2017 -0800

----------------------------------------------------------------------
 ambari-metrics/ambari-metrics-kafka-sink/pom.xml            | 7 ++++++-
 .../metrics2/sink/kafka/KafkaTimelineMetricsReporter.java   | 9 ++++++---
 2 files changed, 12 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ef0ae8bf/ambari-metrics/ambari-metrics-kafka-sink/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-kafka-sink/pom.xml b/ambari-metrics/ambari-metrics-kafka-sink/pom.xml
index 89700cc..91f8fe7 100644
--- a/ambari-metrics/ambari-metrics-kafka-sink/pom.xml
+++ b/ambari-metrics/ambari-metrics-kafka-sink/pom.xml
@@ -144,7 +144,7 @@ limitations under the License.
     <dependency>
       <groupId>org.apache.kafka</groupId>
       <artifactId>kafka_2.10</artifactId>
-      <version>0.8.1.1</version>
+      <version>0.10.1.0</version>
       <exclusions>
         <exclusion>
           <groupId>com.sun.jdmk</groupId>
@@ -169,6 +169,11 @@ limitations under the License.
       </exclusions>
     </dependency>
     <dependency>
+    <groupId>org.apache.kafka</groupId>
+    <artifactId>kafka-clients</artifactId>
+    <version>0.10.1.0</version>
+    </dependency>
+    <dependency>
       <groupId>com.yammer.metrics</groupId>
       <artifactId>metrics-core</artifactId>
       <version>2.2.0</version>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef0ae8bf/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java b/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
index b9ca9f5..9d492cb 100644
--- a/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
+++ b/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
@@ -378,9 +378,12 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
       final long currentTimeMillis = System.currentTimeMillis();
       final String sanitizedName = sanitizeName(name);
 
-      cacheSanitizedTimelineMetric(currentTimeMillis, sanitizedName, "", Double.parseDouble(String.valueOf(gauge.value())));
-
-      populateMetricsList(context, MetricType.GAUGE, sanitizedName);
+      try {
+        cacheSanitizedTimelineMetric(currentTimeMillis, sanitizedName, "", Double.parseDouble(String.valueOf(gauge.value())));
+        populateMetricsList(context, MetricType.GAUGE, sanitizedName);
+      } catch (NumberFormatException ex) {
+        LOG.debug(ex.getMessage());
+      }
     }
 
     private String[] cacheKafkaMetered(long currentTimeMillis, String sanitizedName, Metered meter) {
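
For illustration only (not code from this commit): a minimal Java sketch of the failure mode the new try/catch guards against -- some gauge values are apparently non-numeric, so an unguarded Double.parseDouble would throw and abort the reporting pass. The gauge value below is a made-up example.

public final class GaugeParseSketch {
  // Returns null for non-numeric gauge values instead of propagating NumberFormatException.
  static Double toDoubleOrNull(Object gaugeValue) {
    try {
      return Double.parseDouble(String.valueOf(gaugeValue));
    } catch (NumberFormatException ex) {
      return null; // skip this gauge; keep reporting the rest
    }
  }

  public static void main(String[] args) {
    System.out.println(toDoubleOrNull(42));              // 42.0
    System.out.println(toDoubleOrNull("not-a-number"));  // null (previously this threw)
  }
}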


[13/50] ambari git commit: AMBARI-20099. Password fields are not disabled when they should be (onechiporenko)

Posted by nc...@apache.org.
AMBARI-20099. Password fields are not disabled when they should be (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1a626759
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1a626759
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1a626759

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 1a6267597d5c445dc9bb12e4cfead5073cf134cf
Parents: 0ffa895
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Tue Feb 21 17:57:17 2017 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Wed Feb 22 10:23:00 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/utils/ember_reopen.js | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1a626759/ambari-web/app/utils/ember_reopen.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ember_reopen.js b/ambari-web/app/utils/ember_reopen.js
index ef996fc..c7f93af 100644
--- a/ambari-web/app/utils/ember_reopen.js
+++ b/ambari-web/app/utils/ember_reopen.js
@@ -237,6 +237,10 @@ Ember._HandlebarsBoundView.reopen({
   }
 });
 
+Ember.TextField.reopen({
+  attributeBindings: ['readOnly']
+});
+
 Ember.TextArea.reopen({
   attributeBindings: ['readonly']
 });


[07/50] ambari git commit: AMBARI-20067. Finalize Operations stage fails when Enabling Kerberos using the manual option (rlevas)

Posted by nc...@apache.org.
AMBARI-20067. Finalize Operations stage fails when Enabling Kerberos using the manual option (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d5cbe194
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d5cbe194
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d5cbe194

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d5cbe1940552c1ac6ed142b0d36bc84f45ba3c7f
Parents: 4ff93b0
Author: Robert Levas <rl...@hortonworks.com>
Authored: Tue Feb 21 21:15:39 2017 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Tue Feb 21 21:16:44 2017 -0500

----------------------------------------------------------------------
 .../kerberos/FinalizeKerberosServerAction.java  |  14 +-
 .../kerberos/KerberosServerAction.java          |   2 +-
 .../FinalizeKerberosServerActionTest.java       | 206 +++++++++++++++++++
 3 files changed, 215 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d5cbe194/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
index 9f443b9..2742390 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
@@ -200,12 +200,14 @@ public class FinalizeKerberosServerAction extends KerberosServerAction {
       }
     }
 
-    // Ensure the keytab files for the Ambari identities have the correct permissions
-    // This is important in the event a secure cluster was created via Blueprints since some
-    // user accounts and group may not have been created when the keytab files were created.
-    requestSharedDataContext.put(this.getClass().getName() + "_visited", new HashSet<String>());
-    processIdentities(requestSharedDataContext);
-    requestSharedDataContext.remove(this.getClass().getName() + "_visited");
+    if(getKDCType(getCommandParameters()) != KDCType.NONE) {
+      // Ensure the keytab files for the Ambari identities have the correct permissions
+      // This is important in the event a secure cluster was created via Blueprints since some
+      // user accounts and group may not have been created when the keytab files were created.
+      requestSharedDataContext.put(this.getClass().getName() + "_visited", new HashSet<String>());
+      processIdentities(requestSharedDataContext);
+      requestSharedDataContext.remove(this.getClass().getName() + "_visited");
+    }
 
     // Make sure this is a relevant directory. We don't want to accidentally allow _ANY_ directory
     // to be deleted.

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5cbe194/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
index cab3d8d..d404133 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
@@ -195,7 +195,7 @@ public abstract class KerberosServerAction extends AbstractServerAction {
     String kdcType = getCommandParameterValue(commandParameters, KDC_TYPE);
 
     return ((kdcType == null) || kdcType.isEmpty())
-        ? KDCType.MIT_KDC
+        ? KDCType.NONE
         : KDCType.translate(kdcType);
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5cbe194/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerActionTest.java
new file mode 100644
index 0000000..9404480
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerActionTest.java
@@ -0,0 +1,206 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.Role;
+import org.apache.ambari.server.RoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.audit.AuditLogger;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.SecurityState;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.easymock.EasyMockSupport;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+
+import junit.framework.Assert;
+
+public class FinalizeKerberosServerActionTest extends EasyMockSupport {
+  @Rule
+  public TemporaryFolder folder = new TemporaryFolder();
+
+  @Test
+  public void executeMITKDCOption() throws Exception {
+    String clusterName = "c1";
+    Injector injector = setup(clusterName);
+
+    File dataDirectory = createDataDirectory();
+
+    Map<String, String> commandParams = new HashMap<>();
+    commandParams.put(KerberosServerAction.KDC_TYPE, KDCType.MIT_KDC.name());
+    commandParams.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
+
+    ExecutionCommand executionCommand = createMockExecutionCommand(clusterName, commandParams);
+    HostRoleCommand hostRoleCommand = createMockHostRoleCommand();
+
+    PrincipalKeyCredential principleKeyCredential = createMock(PrincipalKeyCredential.class);
+
+    KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
+    expect(kerberosHelper.getKDCAdministratorCredentials(clusterName)).andReturn(principleKeyCredential).anyTimes();
+
+    replayAll();
+
+    ConcurrentMap<String, Object> requestSharedDataContext = new ConcurrentHashMap<String, Object>();
+
+    FinalizeKerberosServerAction action = injector.getInstance(FinalizeKerberosServerAction.class);
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hostRoleCommand);
+
+    Assert.assertTrue(dataDirectory.exists());
+
+    CommandReport commandReport = action.execute(requestSharedDataContext);
+
+    assertSuccess(commandReport);
+    Assert.assertTrue(!dataDirectory.exists());
+
+    verifyAll();
+  }
+
+  @Test
+  public void executeManualOption() throws Exception {
+    String clusterName = "c1";
+    Injector injector = setup(clusterName);
+
+    File dataDirectory = createDataDirectory();
+
+    Map<String, String> commandParams = new HashMap<>();
+    commandParams.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());
+
+    ExecutionCommand executionCommand = createMockExecutionCommand(clusterName, commandParams);
+    HostRoleCommand hostRoleCommand = createMockHostRoleCommand();
+
+    replayAll();
+
+    ConcurrentMap<String, Object> requestSharedDataContext = new ConcurrentHashMap<String, Object>();
+
+    FinalizeKerberosServerAction action = injector.getInstance(FinalizeKerberosServerAction.class);
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hostRoleCommand);
+
+    Assert.assertTrue(dataDirectory.exists());
+
+    CommandReport commandReport = action.execute(requestSharedDataContext);
+
+    assertSuccess(commandReport);
+
+    Assert.assertTrue(!dataDirectory.exists());
+
+    verifyAll();
+  }
+
+  private File createDataDirectory() throws IOException {
+    File directory = folder.newFolder();
+    File dataDirectory = new File(directory, KerberosServerAction.DATA_DIRECTORY_PREFIX + "_test");
+
+    Assert.assertTrue(dataDirectory.mkdir());
+
+    return dataDirectory;
+  }
+
+  private void assertSuccess(CommandReport commandReport) {
+    Assert.assertEquals(0, commandReport.getExitCode());
+    Assert.assertEquals(HostRoleStatus.COMPLETED.name(), commandReport.getStatus());
+    Assert.assertEquals("{}", commandReport.getStructuredOut());
+  }
+
+  private ExecutionCommand createMockExecutionCommand(String clusterName, Map<String, String> commandParams) {
+    ExecutionCommand executionCommand = createMock(ExecutionCommand.class);
+    expect(executionCommand.getClusterName()).andReturn(clusterName).anyTimes();
+    expect(executionCommand.getCommandParams()).andReturn(commandParams).anyTimes();
+    expect(executionCommand.getRoleCommand()).andReturn(RoleCommand.EXECUTE).anyTimes();
+    expect(executionCommand.getRole()).andReturn(Role.AMBARI_SERVER_ACTION.name()).anyTimes();
+    expect(executionCommand.getConfigurationTags()).andReturn(Collections.<String, Map<String, String>>emptyMap()).anyTimes();
+    expect(executionCommand.getServiceName()).andReturn("AMBARI_SERVER").anyTimes();
+    expect(executionCommand.getTaskId()).andReturn(3L).anyTimes();
+
+    return executionCommand;
+  }
+
+  private HostRoleCommand createMockHostRoleCommand() {
+    HostRoleCommand hostRoleCommand = createMock(HostRoleCommand.class);
+
+    expect(hostRoleCommand.getRequestId()).andReturn(1L).anyTimes();
+    expect(hostRoleCommand.getStageId()).andReturn(2L).anyTimes();
+    expect(hostRoleCommand.getTaskId()).andReturn(3L).anyTimes();
+
+    return hostRoleCommand;
+  }
+
+  private Injector setup(String clusterName) throws AmbariException {
+    final Map<String, Host> clusterHostMap = new HashMap<>();
+    clusterHostMap.put("host1", createMock(Host.class));
+
+    final ServiceComponentHost serviceComponentHost = createMock(ServiceComponentHost.class);
+    expect(serviceComponentHost.getSecurityState()).andReturn(SecurityState.SECURING).anyTimes();
+    expect(serviceComponentHost.getServiceName()).andReturn("SERVICE1").anyTimes();
+    expect(serviceComponentHost.getServiceComponentName()).andReturn("COMPONENT1A").anyTimes();
+    expect(serviceComponentHost.getHostName()).andReturn("host1").anyTimes();
+    expect(serviceComponentHost.getDesiredSecurityState()).andReturn(SecurityState.SECURED_KERBEROS).anyTimes();
+    serviceComponentHost.setSecurityState(SecurityState.SECURED_KERBEROS);
+    expectLastCall().once();
+
+    final List<ServiceComponentHost> serviceComponentHosts = new ArrayList<>();
+    serviceComponentHosts.add(serviceComponentHost);
+
+    final Cluster cluster = createMock(Cluster.class);
+    expect(cluster.getClusterName()).andReturn(clusterName).anyTimes();
+    expect(cluster.getServiceComponentHosts("host1")).andReturn(serviceComponentHosts).anyTimes();
+
+    final Clusters clusters = createMock(Clusters.class);
+    expect(clusters.getHostsForCluster(clusterName)).andReturn(clusterHostMap).anyTimes();
+    expect(clusters.getCluster(clusterName)).andReturn(cluster).anyTimes();
+
+    return Guice.createInjector(new AbstractModule() {
+
+      @Override
+      protected void configure() {
+        bind(KerberosHelper.class).toInstance(createMock(KerberosHelper.class));
+        bind(Clusters.class).toInstance(clusters);
+        bind(AuditLogger.class).toInstance(createNiceMock(AuditLogger.class));
+      }
+    });
+  }
+
+}
\ No newline at end of file
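
Taken together, the first two hunks in this commit change the default returned by getKDCType() for a missing or empty kdc_type from MIT_KDC to NONE, and FinalizeKerberosServerAction now runs the keytab-permission fix-up (processIdentities) only when the KDC type is not NONE -- which is why the manual Kerberos option no longer fails in the Finalize Operations stage; the new test above covers both paths. A standalone sketch of the resulting behaviour (not the actual Ambari classes; enum values and translation logic are simplified):

    public class KdcTypeGuardSketch {
      enum KDCType { NONE, MIT_KDC, ACTIVE_DIRECTORY }

      // Missing/empty kdc_type now maps to NONE (it previously mapped to MIT_KDC).
      static KDCType kdcTypeOf(String kdcType) {
        return (kdcType == null || kdcType.isEmpty())
            ? KDCType.NONE
            : KDCType.valueOf(kdcType.trim().toUpperCase().replace('-', '_'));
      }

      public static void main(String[] args) {
        for (String raw : new String[]{ null, "", "mit-kdc" }) {
          KDCType type = kdcTypeOf(raw);
          boolean fixKeytabPermissions = (type != KDCType.NONE);   // the new guard in finalize
          System.out.println(raw + " -> " + type + ", fix keytab permissions: " + fixKeytabPermissions);
        }
      }
    }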


[14/50] ambari git commit: AMBARI-20083 Grafana Dashboard for Log Search (mgergely)

Posted by nc...@apache.org.
AMBARI-20083 Grafana Dashboard for Log Search (mgergely)

Change-Id: I58fe89a8dff634b20a54eac4b385aeee84d6b566


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/df227658
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/df227658
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/df227658

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: df227658c64ed9068abc4cd7af8c03aa74b5c60e
Parents: 1a62675
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Feb 22 10:19:41 2017 +0100
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Wed Feb 22 10:19:41 2017 +0100

----------------------------------------------------------------------
 .../HDP/grafana-logsearch-home.json             | 600 +++++++++++++++++++
 1 file changed, 600 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/df227658/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-logsearch-home.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-logsearch-home.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-logsearch-home.json
new file mode 100644
index 0000000..3eb2d8a
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-logsearch-home.json
@@ -0,0 +1,600 @@
+{
+  "id": null,
+  "title": "Log Search - Home",
+  "originalTitle": "Log Search - Home",
+  "tags": [
+    "2.5.0.0",
+    "builtin",
+    "logsearch",
+    "builtin",
+    "2.5.0.0"
+  ],
+  "style": "dark",
+  "timezone": "browser",
+  "editable": true,
+  "hideControls": false,
+  "sharedCrosshair": false,
+  "rows": [
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "content": "<h4 align=\"center\">Metrics to see the status for the Log Search Service.</h4>\n<h6 style=\"color:red;\" align=\"center\">This dashboard is managed by Ambari.  You may lose any changes made to this dashboard.  If you want to customize, make your own copy.</h6>",
+          "editable": true,
+          "error": false,
+          "id": 9,
+          "isNew": true,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "style": {},
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "title": "New row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "format": "none",
+          "height": "10",
+          "id": 6,
+          "interval": null,
+          "isNew": true,
+          "links": [],
+          "maxDataPoints": 100,
+          "minSpan": 1,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "span": 12,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "",
+              "app": "logfeeder",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "input.files.count",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "thresholds": "",
+          "title": "Number of log files handled",
+          "type": "singlestat",
+          "valueFontSize": "200%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        }
+      ],
+      "title": "New row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 3,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "Input lines read",
+              "app": "logfeeder",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "input.files.read_lines",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Input Lines",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 4,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "Input data read",
+              "app": "logfeeder",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "input.files.read_bytes",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Input Data",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "short"
+          ]
+        }
+      ],
+      "title": "New row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 7,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "Grok filter errors",
+              "app": "logfeeder",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hide": false,
+              "hosts": "",
+              "metric": "filter.error.grok",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Grok Filter Errors",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 8,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "Key-value filter errors",
+              "app": "logfeeder",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "filter.error.keyvalue",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Key-value Filter Errors",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        }
+      ],
+      "title": "New row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 1,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "Log entries published",
+              "app": "logfeeder",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hide": false,
+              "hosts": "",
+              "metric": "output.solr.write_logs",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Publications",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "short"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 5,
+          "isNew": true,
+          "legend": {
+            "avg": false,
+            "current": false,
+            "max": false,
+            "min": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "Data written to Solr",
+              "app": "logfeeder",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "metric": "output.solr.write_bytes",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Output Traffic",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "short"
+          ]
+        }
+      ],
+      "title": "Row"
+    }
+  ],
+  "time": {
+    "from": "now-6h",
+    "to": "now"
+  },
+  "timepicker": {
+    "refresh_intervals": [
+      "5s",
+      "10s",
+      "30s",
+      "1m",
+      "5m",
+      "15m",
+      "30m",
+      "1h",
+      "2h",
+      "1d"
+    ],
+    "time_options": [
+      "5m",
+      "15m",
+      "1h",
+      "6h",
+      "12h",
+      "24h",
+      "2d",
+      "7d",
+      "30d"
+    ]
+  },
+  "templating": {
+    "list": []
+  },
+  "annotations": {
+    "list": []
+  },
+  "schemaVersion": 8,
+  "version": 4,
+  "links": []
+}
\ No newline at end of file


[38/50] ambari git commit: AMBARI-20135. Hive view 2.0: Hanging query - no cancel available. (gauravn7)

Posted by nc...@apache.org.
AMBARI-20135. Hive view 2.0: Hanging query - no cancel available. (gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6572b16e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6572b16e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6572b16e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 6572b16e8f721cfabba4b6cdb10d52e6f4f9e382
Parents: 70f23c0
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 23 19:02:13 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 23 19:02:13 2017 +0530

----------------------------------------------------------------------
 .../ambari/view/hive20/actor/JdbcConnector.java  |  1 +
 .../resources/ui/app/routes/queries/query.js     | 19 +++++++++++++++++--
 .../src/main/resources/ui/app/services/jobs.js   |  5 +++++
 .../resources/ui/app/templates/queries/query.hbs | 10 +++++++++-
 4 files changed, 32 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6572b16e/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
index 1d9744b..61f0b66 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
@@ -238,6 +238,7 @@ public class JdbcConnector extends HiveActor {
     try {
       isCancelCalled = true;
       connectionDelegate.cancel();
+      LOG.info("Cancelled JobId:"+ jobId);
     } catch (SQLException e) {
       LOG.error("Failed to cancel job. JobId: {}. {}", message.getJobId(), e);
     }
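
A small note on the added log statement: the surrounding code already logs with parameterized placeholders (see the LOG.error call in the catch block), so the new line could use the same style instead of string concatenation -- illustrative only, not part of the commit:

    LOG.info("Cancelled JobId: {}", jobId);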

http://git-wip-us.apache.org/repos/asf/ambari/blob/6572b16e/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 5bed2c6..16103b7 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -137,6 +137,8 @@ export default Ember.Route.extend(UILoggerMixin, {
     controller.set('currentJobId', null);
     controller.set('queryResult', model.get('queryResult'));
     controller.set('isJobSuccess', false);
+    controller.set('isJobCancelled', false);
+    controller.set('isJobCreated', false);
 
     controller.set('isExportResultSuccessMessege', false);
     controller.set('isExportResultFailureMessege', false);
@@ -236,10 +238,10 @@ export default Ember.Route.extend(UILoggerMixin, {
       this.get('controller').set('currentJobId', null);
 
       if(!Ember.isEmpty(isVisualExplainQuery)){
-        var isVisualExplainQuery = true;
+        isVisualExplainQuery = true;
         this.get('controller').set('isVisualExplainQuery', true);
       } else {
-        var isVisualExplainQuery = false;
+        isVisualExplainQuery = false;
         this.get('controller').set('isVisualExplainQuery', false);
       }
 
@@ -285,6 +287,7 @@ export default Ember.Route.extend(UILoggerMixin, {
 
 
       this.get('controller.model').set('isQueryRunning', true);
+      this.get('controller.model').set('isJobCreated',false);
 
       //this.get('controller').set('queryResult', self.get('controller').get('queryResult'));
       //this.get('controller.model').set('queryResult', self.get('controller').get('queryResult'));
@@ -306,10 +309,13 @@ export default Ember.Route.extend(UILoggerMixin, {
         self.get('controller.model').set('queryFile', data.job.queryFile);
         self.get('controller.model').set('logFile', data.job.logFile);
         self.get('controller').set('currentJobId', data.job.id);
+        self.get('controller').set('isJobCreated',true);
 
         self.get('jobs').waitForJobToComplete(data.job.id, 2 * 1000, false)
           .then((status) => {
             self.get('controller').set('isJobSuccess', true);
+            self.get('controller').set('isJobCancelled', false);
+            self.get('controller').set('isJobCreated', false);
             let jobDetails = self.store.peekRecord('job', data.job.id);
             console.log(jobDetails);
             self.send('getJobResult', data, payload.title, jobDetails);
@@ -317,6 +323,9 @@ export default Ember.Route.extend(UILoggerMixin, {
 
           }, (error) => {
             console.log('error', error);
+            self.get('controller').set('isJobSuccess', false);
+            self.get('controller').set('isJobCancelled', false);
+            self.get('controller').set('isJobCreated', false);
             self.get('logger').danger('Failed to execute query.', self.extractError(error));
             self.send('resetDefaultWorksheet');
           });
@@ -328,6 +337,12 @@ export default Ember.Route.extend(UILoggerMixin, {
       });
     },
 
+    stopQuery(){
+      let jobId = this.get('controller').get('currentJobId');
+      this.get('jobs').stopJob(jobId)
+        .then( data => this.get('controller').set('isJobCancelled', true));
+    },
+
     getJobResult(data, payloadTitle, jobDetails){
       let self = this;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6572b16e/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
index 8503715..273cd90 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
@@ -62,6 +62,11 @@ export default Ember.Service.extend({
     });
   },
 
+  stopJob : function(jobId) {
+    return this.get('store').findRecord('job', jobId)
+      .then(job => job.destroyRecord())
+  },
+
   _fetchDummyResult(jobId) {
     this.get('store').adapterFor('job').fetchResult(jobId);
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/6572b16e/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index 9be3873..8a771eb 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -28,7 +28,15 @@
         {{query-editor query=currentQuery updateQuery='updateQuery' }}
       </div>
       <div class="row query-editor-controls">
-        <button class="btn btn-success" {{action "executeQuery" }} disabled={{ worksheet.isQueryRunning}}>{{fa-icon "check"}} Execute</button>
+        {{#if isJobCreated}}
+          {{#if isJobCancelled}}
+             <button class="btn btn-warning" disabled >{{fa-icon "ban"}} Stopping</button>
+          {{else}}
+            <button class="btn btn-warning" {{action "stopQuery" }} >{{fa-icon "ban"}} Stop</button>
+          {{/if}}
+        {{else}}
+          <button class="btn btn-success" {{action "executeQuery" }} disabled={{ worksheet.isQueryRunning}} >{{fa-icon "check"}} Execute</button>
+        {{/if}}
 
         <button class="btn btn-default" {{action "openWorksheetModal" }}>{{fa-icon "save"}} Save As</button>
         <div class="btn-group">


[24/50] ambari git commit: AMBARI-20114. Hive2: Visual Explain - Eliminate "limit -1" (pallavkul)

Posted by nc...@apache.org.
AMBARI-20114. Hive2: Visual Explain - Eliminate "limit -1" (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a6f3420e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a6f3420e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a6f3420e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: a6f3420e71d4c28a22023e7c3b009f0bc5503acc
Parents: 7cc2a20
Author: pallavkul <pa...@gmail.com>
Authored: Wed Feb 22 19:48:18 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Wed Feb 22 19:48:18 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/utils/hive-explainer/renderer-force.js   | 2 +-
 .../src/main/resources/ui/app/utils/hive-explainer/renderer.js     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a6f3420e/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer-force.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer-force.js b/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer-force.js
index 2dfdc86..a73d1b2 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer-force.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer-force.js
@@ -144,7 +144,7 @@ function getRenderer(type) {
           </div>
           <div class='operator-body' style='margin-left: 10px;'>
             <div>${getOperatorLabel(d)}</div>
-            ${d['limit:'] ? '<div><span style="font-weight: lighter;">Limit:</span> ' + d['limit:'] + ' </div>' : ''}
+            ${(d['limit:'] && d['limit:'] > -1) ? '<div><span style="font-weight: lighter;">Limit:</span> ' + d['limit:'] + ' </div>' : ''}
           </div>
         </div>
       `);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a6f3420e/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer.js b/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer.js
index a332802..5fa5d2e 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/utils/hive-explainer/renderer.js
@@ -133,7 +133,7 @@ function getRenderer(type) {
           </div>
           <div class='operator-body' style='margin-left: 10px;'>
             <div>${getOperatorLabel(d)}</div>
-            ${d['limit:'] ? '<div><span style="font-weight: lighter;">Limit:</span> ' + d['limit:'] + ' </div>' : ''}
+            ${(d['limit:'] && d['limit:'] > -1) ? '<div><span style="font-weight: lighter;">Limit:</span> ' + d['limit:'] + ' </div>' : ''}
           </div>
         </div>
       `);


[22/50] ambari git commit: AMBARI-20088 ADDENDUM Log Search should handle a turned-off Credential Store too (mgergely)

Posted by nc...@apache.org.
AMBARI-20088 ADDENDUM Log Search should handle a turned-off Credential Store too (mgergely)

Change-Id: I277228c32a512eb36da0d7fba93ce5c2c0a55455


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/569f5661
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/569f5661
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/569f5661

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 569f5661d4223f6eaf4d82f0771c9cac19883cfb
Parents: 45b423b
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Feb 22 13:17:03 2017 +0100
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Wed Feb 22 13:17:03 2017 +0100

----------------------------------------------------------------------
 .../src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py     | 6 ++++++
 .../src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py     | 6 ++++++
 2 files changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/569f5661/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
index 064978b..0a7f074 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
@@ -60,6 +60,12 @@ class TestLogFeeder(RMFTestCase):
                               mode=0644,
                               content=''
                               )
+    self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/keys/ks_pass.txt',
+                              action = ['delete']
+                              )
+    self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/keys/ts_pass.txt',
+                              action = ['delete']
+                              )
     self.assertResourceCalled('PropertiesFile', '/etc/ambari-logsearch-logfeeder/conf/logfeeder.properties',
                               properties={'hadoop.security.credential.provider.path': 'jceks://file/etc/ambari-logsearch-logfeeder/conf/logfeeder.jceks',
                                           'logfeeder.checkpoint.folder': '/etc/ambari-logsearch-logfeeder/conf/checkpoints',

http://git-wip-us.apache.org/repos/asf/ambari/blob/569f5661/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
index 587561a..b283a9f 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
@@ -76,6 +76,12 @@ class TestLogSearch(RMFTestCase):
                               mode = 0644,
                               content = ''
     )
+    self.assertResourceCalled('File', '/etc/ambari-logsearch-portal/conf/keys/ks_pass.txt',
+                              action = ['delete']
+    )
+    self.assertResourceCalled('File', '/etc/ambari-logsearch-portal/conf/keys/ts_pass.txt',
+                              action = ['delete']
+    )
     self.assertResourceCalled('PropertiesFile', '/etc/ambari-logsearch-portal/conf/logsearch.properties',
                               properties = {'hadoop.security.credential.provider.path': 'jceks://file/etc/ambari-logsearch-portal/conf/logsearch.jceks',
                                             'logsearch.audit.logs.split.interval.mins': '1',


[37/50] ambari git commit: AMBARI-20119. "Use RedHat Satellite/Spacewalk" setting not working correctly in Ambari 2.4.2.0 (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-20119. "Use RedHat Satellite/Spacewalk" setting not working correctly in Ambari 2.4.2.0 (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/70f23c07
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/70f23c07
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/70f23c07

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 70f23c07058b700a37b046e8412217b11a661aa4
Parents: aeb5d4e
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Thu Feb 23 15:10:33 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Thu Feb 23 15:10:33 2017 +0200

----------------------------------------------------------------------
 .../ambari/server/state/RepositoryInfo.java     | 20 ++++++++++++++++++--
 .../stack/upgrade/RepositoryVersionHelper.java  |  2 ++
 .../upgrade/RepositoryVersionHelperTest.java    |  3 ++-
 3 files changed, 22 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/70f23c07/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
index 41ea5e7..31a00ca 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
@@ -34,6 +34,7 @@ public class RepositoryInfo {
   private String latestBaseUrl;
   private boolean repoSaved = false;
   private boolean unique = false;
+  private boolean ambariManagedRepositories = true;
 
   /**
    * @return the baseUrl
@@ -170,6 +171,7 @@ public class RepositoryInfo {
         + ", repoName=" + repoName
         + ", mirrorsList=" + mirrorsList
         + ", unique=" + unique
+        + ", ambariManagedRepositories=" + ambariManagedRepositories
         + " ]";
   }
 
@@ -186,12 +188,13 @@ public class RepositoryInfo {
         Objects.equal(repoName, that.repoName) &&
         Objects.equal(mirrorsList, that.mirrorsList) &&
         Objects.equal(defaultBaseUrl, that.defaultBaseUrl) &&
-        Objects.equal(latestBaseUrl, that.latestBaseUrl);
+        Objects.equal(latestBaseUrl, that.latestBaseUrl) &&
+        Objects.equal(ambariManagedRepositories, that.ambariManagedRepositories);
   }
 
   @Override
   public int hashCode() {
-    return Objects.hashCode(baseUrl, osType, repoId, repoName, mirrorsList, defaultBaseUrl, latestBaseUrl, repoSaved, unique);
+    return Objects.hashCode(baseUrl, osType, repoId, repoName, mirrorsList, defaultBaseUrl, latestBaseUrl, repoSaved, unique, ambariManagedRepositories);
   }
 
   public RepositoryResponse convertToResponse()
@@ -236,4 +239,17 @@ public class RepositoryInfo {
     }
   };
 
+  /**
+   * @return true if repositories managed by ambari
+   */
+  public boolean isAmbariManagedRepositories() {
+    return ambariManagedRepositories;
+  }
+
+  /**
+   * @param ambariManagedRepositories set is repositories managed by ambari
+   */
+  public void setAmbariManagedRepositories(boolean ambariManagedRepositories) {
+    this.ambariManagedRepositories = ambariManagedRepositories;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/70f23c07/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
index 04f350f..7d5cbd0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
@@ -162,6 +162,7 @@ public class RepositoryVersionHelper {
         repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_MIRRORS_LIST_PROPERTY_ID, repository.getMirrorsList());
         repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID, repository.isUnique());
         repositoriesJson.add(repositoryJson);
+        operatingSystemJson.addProperty(OperatingSystemResourceProvider.OPERATING_SYSTEM_AMBARI_MANAGED_REPOS, repository.isAmbariManagedRepositories());
       }
       operatingSystemJson.add(RepositoryVersionResourceProvider.SUBRESOURCE_REPOSITORIES_PROPERTY_ID, repositoriesJson);
       operatingSystemJson.addProperty(OperatingSystemResourceProvider.OPERATING_SYSTEM_OS_TYPE_PROPERTY_ID, operatingSystem.getKey());
@@ -179,6 +180,7 @@ public class RepositoryVersionHelper {
         repositoryInfo.setRepoName(repositoryEntity.getName());
         repositoryInfo.setBaseUrl(repositoryEntity.getBaseUrl());
         repositoryInfo.setOsType(os.getOsType());
+        repositoryInfo.setAmbariManagedRepositories(os.isAmbariManagedRepos());
         repositoryInfos.add(repositoryInfo);
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/70f23c07/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java
index 00b8df4..a852743 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java
@@ -58,9 +58,10 @@ public class RepositoryVersionHelperTest {
     repository.setOsType("os");
     repository.setRepoId("repoId");
     repository.setUnique(true);
+    repository.setAmbariManagedRepositories(true);
     repositories.add(repository);
 
     final String serialized = helper.serializeOperatingSystems(repositories);
-    Assert.assertEquals("[{\"repositories\":[{\"Repositories/base_url\":\"baseurl\",\"Repositories/repo_id\":\"repoId\",\"Repositories/unique\":true}],\"OperatingSystems/os_type\":\"os\"}]", serialized);
+    Assert.assertEquals("[{\"OperatingSystems/ambari_managed_repositories\":true,\"repositories\":[{\"Repositories/base_url\":\"baseurl\",\"Repositories/repo_id\":\"repoId\",\"Repositories/unique\":true}],\"OperatingSystems/os_type\":\"os\"}]", serialized);
   }
 }
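
The three hunks above thread a new ambariManagedRepositories flag through RepositoryInfo (field, equals/hashCode, toString, getter/setter), write it into the serialized operating-system JSON as OperatingSystems/ambari_managed_repositories, and read it back from the OS entity when rebuilding RepositoryInfo objects, so the "Use RedHat Satellite/Spacewalk" choice survives the round trip. A short usage sketch mirroring the updated unit test; it assumes a RepositoryVersionHelper instance named helper (as in the test) and is not runnable outside ambari-server, and the property value for false is inferred from the serializer change above:

    // Sketch only -- mirrors RepositoryVersionHelperTest with the flag turned off.
    List<RepositoryInfo> repositories = new ArrayList<>();

    RepositoryInfo repository = new RepositoryInfo();
    repository.setBaseUrl("baseurl");
    repository.setOsType("os");
    repository.setRepoId("repoId");
    repository.setUnique(true);
    repository.setAmbariManagedRepositories(false);   // i.e. "Use RedHat Satellite/Spacewalk"
    repositories.add(repository);

    String serialized = helper.serializeOperatingSystems(repositories);
    // serialized now contains "OperatingSystems/ambari_managed_repositories":false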


[12/50] ambari git commit: AMBARI-20111. Hive2: Query in query editor is getting modified while navigating across different tabs (pallavkul)

Posted by nc...@apache.org.
AMBARI-20111. Hive2: Query in query editor is getting modified while navigating across different tabs (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0ffa8953
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0ffa8953
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0ffa8953

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 0ffa89532854e8760f543acd3434952eebc24f8c
Parents: c655d7c
Author: pallavkul <pa...@gmail.com>
Authored: Wed Feb 22 13:15:35 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Wed Feb 22 13:15:35 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/configs/result-tabs.js   |  6 +++---
 .../main/resources/ui/app/routes/queries/query.js  | 17 ++++++++++++-----
 .../main/resources/ui/app/routes/savedqueries.js   |  3 ---
 .../resources/ui/app/templates/savedqueries.hbs    |  1 -
 4 files changed, 15 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0ffa8953/contrib/views/hive20/src/main/resources/ui/app/configs/result-tabs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/result-tabs.js b/contrib/views/hive20/src/main/resources/ui/app/configs/result-tabs.js
index f48dcd8..3284533 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/configs/result-tabs.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/result-tabs.js
@@ -23,19 +23,19 @@ let resultsTabs = [
     name: 'results',
     label: 'RESULTS',
     link: 'queries.query.results',
-    faIcon: 'paper-plane'
+    faIcon: 'file-text-o'
   }),
   Ember.Object.create({
     name: 'log',
     label: 'LOG',
     link: 'queries.query.log',
-    faIcon: 'paper-plane'
+    faIcon: 'list'
   }),
   Ember.Object.create({
     name: 'visual-explain',
     label: 'VISUAL EXPLAIN',
     link: 'queries.query.visual-explain',
-    faIcon: 'paper-plane'
+    faIcon: 'link'
   }),
   Ember.Object.create({
     name: 'tez-ui',

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ffa8953/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 9196d8b..c549d7b 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -211,8 +211,8 @@ export default Ember.Route.extend(UILoggerMixin, {
         this.get('controller').set('isVisualExplainQuery', false);
       }
 
-
-      let queryInput = this.get('controller').get('currentQuery');
+      let originalQuery = this.get('controller').get('currentQuery');
+      let queryInput = originalQuery;
 
       if (isVisualExplainQuery) {
         queryInput = "";
@@ -233,16 +233,23 @@ export default Ember.Route.extend(UILoggerMixin, {
         }
       }
 
-      this.get('controller.model').set('query', queryInput);
+      this.get('controller.model').set('query', originalQuery);
 
       let dbid = this.get('controller.model').get('selectedDb');
       let worksheetTitle = this.get('controller.model').get('title');
 
       this.get('controller.model').set('jobData', []);
+      self.get('controller.model').set('currentPage', 0);
+      self.get('controller.model').set('previousPage', -1 );
+      self.get('controller.model').set('nextPage', 1);
+      self.get('controller.model').set('queryResult', {'schema' :[], 'rows' :[]});
+      self.get('controller.model').set('visualExplainJson', null);
+
+
       this.get('controller.model').set('isQueryRunning', true);
 
-      this.get('controller').set('queryResult', self.get('controller').get('queryResult'));
-      this.get('controller.model').set('queryResult', self.get('controller').get('queryResult'));
+      //this.get('controller').set('queryResult', self.get('controller').get('queryResult'));
+      //this.get('controller.model').set('queryResult', self.get('controller').get('queryResult'));
 
       let globalSettings = this.get('globalSettings');
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ffa8953/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js b/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
index c2a0c8c..7b18424 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
@@ -36,9 +36,6 @@ export default Ember.Route.extend({
   },
 
   actions: {
-    historySavedQuery(id){
-      console.log('historySavedQuery', id);
-    },
 
     deleteSavedQuery(){
       let queryId = this.get('controller').get('selectedSavedQueryId');

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ffa8953/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
index c3d907d..c0b5e82 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
@@ -39,7 +39,6 @@
               <div class="dropdown">
                 <a class="dropdown-toggle" id="dropdownMenu1121" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">{{fa-icon "cog"}}</a>
                 <ul class="dropdown-menu dropdown-menu-right" aria-labelledby="dropdownMenu">
-                  <li><a href="#" {{action "historySavedQuery" savedQuery.id }} class="text-uppercase">{{fa-icon "history"}} History</a></li>
                   <li><a href="#" {{action "openDeleteSavedQueryModal" savedQuery.id}} class="text-uppercase">{{fa-icon "remove"}} Delete</a></li>
                   <li><a href="#" {{action "openAsWorksheet" savedQuery }} class="text-uppercase">{{fa-icon "folder-open-o"}} Open as worksheet</a></li>
                 </ul>


[40/50] ambari git commit: AMBARI-20137. File upload from Ambari File View does not work in Chrome 56.0.2924.87 and IE 11.(gauravn7)

Posted by nc...@apache.org.
AMBARI-20137. File upload from Ambari File View does not work in Chrome 56.0.2924.87 and IE 11.(gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e1cb3b1e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e1cb3b1e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e1cb3b1e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: e1cb3b1e2a01c04bac96ea3b5b5957848eec721a
Parents: 98f40e3
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 23 19:09:10 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 23 19:12:34 2017 +0530

----------------------------------------------------------------------
 .../ambari/view/commons/hdfs/UploadService.java  |  2 +-
 .../resources/ui/app/adapters/application.js     |  6 ++++++
 .../resources/ui/app/components/upload-file.js   | 19 +++++++++++++++----
 3 files changed, 22 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e1cb3b1e/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java
index 26a4873..a2fe7eb 100644
--- a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java
+++ b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java
@@ -94,7 +94,7 @@ public class UploadService extends HdfsService {
     try {
       if (!path.endsWith("/"))
         path = path + "/";
-      String filePath = path + contentDisposition.getFileName();
+      String filePath = path + new String(contentDisposition.getFileName().getBytes("ISO8859-1"),"UTF-8");
       uploadFile(filePath, uploadedInputStream);
       return Response.ok(
           getApi().fileStatusToJSON(getApi().getFileStatus(filePath)))

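The UploadService change re-decodes the multipart Content-Disposition filename: multipart parsers typically decode header bytes as ISO-8859-1, so a UTF-8 filename arrives with its bytes misread, and turning those characters back into raw bytes and decoding them as UTF-8 recovers the original name. That is what the added new String(name.getBytes("ISO8859-1"), "UTF-8") does. A minimal sketch of the same round trip in plain JavaScript (browser or modern Node; the sample filename is illustrative):

  // UTF-8 bytes of "résumé.txt" decoded as ISO-8859-1 show up as this mojibake:
  var misread = 'r\u00C3\u00A9sum\u00C3\u00A9.txt';                      // "rÃ©sumÃ©.txt"
  // Map each Latin-1 code point back to its original byte value...
  var bytes = Uint8Array.from(misread, function (c) { return c.charCodeAt(0); });
  // ...then decode those bytes as UTF-8, mirroring the Java fix above.
  var recovered = new TextDecoder('utf-8').decode(bytes);               // "résumé.txt"
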
http://git-wip-us.apache.org/repos/asf/ambari/blob/e1cb3b1e/contrib/views/files/src/main/resources/ui/app/adapters/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/adapters/application.js b/contrib/views/files/src/main/resources/ui/app/adapters/application.js
index 4042851..059d379 100644
--- a/contrib/views/files/src/main/resources/ui/app/adapters/application.js
+++ b/contrib/views/files/src/main/resources/ui/app/adapters/application.js
@@ -20,6 +20,12 @@ import DS from 'ember-data';
 import Ember from 'ember';
 
 export default DS.RESTAdapter.extend({
+  init: function () {
+    Ember.$.ajaxSetup({
+      cache: false
+    });
+  },
+
   namespace: Ember.computed(function() {
     var parts = window.location.pathname.split('/').filter(function(i) {
       return i !== "";

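The adapter's new init hook turns off jQuery's GET caching for the whole view. With cache: false, jQuery appends a "_=<timestamp>" parameter to every GET, so IE 11 and Chrome cannot serve a stale cached response for the view's API calls. A minimal sketch in plain jQuery (the URL is illustrative, not the Files view API):

  // Plain-jQuery equivalent of the adapter's init hook:
  $.ajaxSetup({ cache: false });            // every subsequent GET gains a "_=<timestamp>" param
  $.getJSON('/api/v1/files?path=/tmp')      // effectively /api/v1/files?path=/tmp&_=1487...
    .done(function (listing) { console.log(listing); });
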
http://git-wip-us.apache.org/repos/asf/ambari/blob/e1cb3b1e/contrib/views/files/src/main/resources/ui/app/components/upload-file.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/components/upload-file.js b/contrib/views/files/src/main/resources/ui/app/components/upload-file.js
index 96686db..8a14272 100644
--- a/contrib/views/files/src/main/resources/ui/app/components/upload-file.js
+++ b/contrib/views/files/src/main/resources/ui/app/components/upload-file.js
@@ -58,13 +58,19 @@ export default Ember.Component.extend(OperationModal, {
   _checkIfFileIsNotDirectory: function(file) {
     return new Ember.RSVP.Promise((resolve, reject) => {
 
+      let isSuccess = false;
+
       if (!Ember.isNone(file.size) && file.size <= 4096) { // Directories generally have less equal to 4096 bytes as size
         var reader = new FileReader();
         reader.onerror = function() {
-          return reject();
+          if(isSuccess) {
+            return;
+          }
+          return reject(reader.error);
         };
 
         reader.onloadstart = function() {
+          isSuccess = true;
           reader.abort();
           return resolve();
         };
@@ -92,7 +98,6 @@ export default Ember.Component.extend(OperationModal, {
     },
 
     fileLoaded: function(file) {
-
       this._checkIfFileIsNotDirectory(file).then(() => {
         var url = this.get('fileOperationService').getUploadUrl();
         var uploader = FileUploader.create({
@@ -118,8 +123,14 @@ export default Ember.Component.extend(OperationModal, {
             return false;
           });
         }
-      }, () => {
-        console.error("Cannot add a directory.");
+      }, (error) => {
+        console.error("Cannot add a directory.", error);
+        this.send('close');
+        let message = "Cannot add a directory ";
+        if(file && file.name) {
+          message = message + file.name;
+        }
+        this.get('logger').danger(message);
       });
 
     },

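The isSuccess flag added to upload-file.js keeps an error event fired after the deliberate abort() (seen in the affected browsers) from rejecting a file whose read already started; only a read that never starts is treated as a directory. A standalone sketch of the same directory-versus-file probe, assuming readAsArrayBuffer as the read call (helper name is illustrative, not the component's API):

  // Heuristic: directories usually report size <= 4096 bytes and cannot actually be read.
  function checkNotDirectory(file) {
    return new Promise(function (resolve, reject) {
      if (file.size == null || file.size > 4096) { return resolve(); } // large enough to be a real file
      var started = false;
      var reader = new FileReader();
      reader.onloadstart = function () {
        started = true;      // the read began, so this is a readable file
        reader.abort();      // no need to read the rest
        resolve();
      };
      reader.onerror = function () {
        if (!started) { reject(reader.error); }                        // never started: likely a directory
      };
      reader.readAsArrayBuffer(file);
    });
  }
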

[17/50] ambari git commit: AMBARI-19989. Allow user to view Tez View after executing query (Venkata Sairam via pallavkul)

Posted by nc...@apache.org.
AMBARI-19989. Allow user to view Tez View after executing query (Venkata Sairam via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f080bd6c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f080bd6c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f080bd6c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: f080bd6cb25d33a76265ee7d21c6e98788ef7423
Parents: efbd66b
Author: pallavkul <pa...@gmail.com>
Authored: Wed Feb 22 17:10:07 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Wed Feb 22 17:10:07 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/models/worksheet.js   |  3 +-
 .../resources/ui/app/routes/queries/query.js    | 20 ++++-
 .../resources/ui/app/services/tez-view-info.js  | 83 ++++++++++++++++++++
 .../src/main/resources/ui/app/styles/app.scss   |  5 ++
 .../ui/app/templates/queries/query.hbs          |  1 -
 .../ui/app/templates/queries/query/tez-ui.hbs   |  6 +-
 6 files changed, 111 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f080bd6c/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js b/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
index 4b3e1f9..f820ea0 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
@@ -40,5 +40,6 @@ export default DS.Model.extend({
   isQueryRunning: DS.attr('boolean', {defaultValue: false}),
   isQueryResultContainer: DS.attr('boolean', {defaultValue: false}),
   visualExplainJson: DS.attr({defaultValue: null}),
-  lastResultRoute: DS.attr({defaultValue: ""})
+  lastResultRoute: DS.attr({defaultValue: ""}),
+  tezUrl: DS.attr('string', {defaultValue: null})
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/f080bd6c/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index c549d7b..7860af1 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -27,6 +27,7 @@ export default Ember.Route.extend(UILoggerMixin, {
   isQueryEdidorPaneExpanded: false,
   isQueryResultPanelExpanded: false,
   globalSettings: '',
+  tezViewInfo: Ember.inject.service(),
 
   beforeModel(params){
     console.log('worksheetId', params.params['queries.query'].worksheetId);
@@ -74,7 +75,6 @@ export default Ember.Route.extend(UILoggerMixin, {
     } else {
       this.transitionTo('queries.query' + lastResultRoute);
     }
-
   },
 
   model(params) {
@@ -91,6 +91,7 @@ export default Ember.Route.extend(UILoggerMixin, {
   setupController(controller, model) {
 
     this._super(...arguments);
+    this.get("tezViewInfo").getTezViewInfo();
 
     let self = this;
     let alldatabases = this.store.findAll('database');
@@ -212,6 +213,11 @@ export default Ember.Route.extend(UILoggerMixin, {
       }
 
       let originalQuery = this.get('controller').get('currentQuery');
+      if(Ember.isBlank(originalQuery)) {
+        this.get('logger').danger('Query cannot be empty.');
+        this.send('resetDefaultWorksheet');
+        return;
+      }
       let queryInput = originalQuery;
 
       if (isVisualExplainQuery) {
@@ -235,6 +241,7 @@ export default Ember.Route.extend(UILoggerMixin, {
 
       this.get('controller.model').set('query', originalQuery);
 
+
       let dbid = this.get('controller.model').get('selectedDb');
       let worksheetTitle = this.get('controller.model').get('title');
 
@@ -272,8 +279,11 @@ export default Ember.Route.extend(UILoggerMixin, {
         self.get('jobs').waitForJobToComplete(data.job.id, 2 * 1000, false)
           .then((status) => {
             self.get('controller').set('isJobSuccess', true);
-            self.send('getJobResult', data, payload.title);
+            let jobDetails = self.store.peekRecord('job', data.job.id);
+            console.log(jobDetails);
+            self.send('getJobResult', data, payload.title, jobDetails);
             self.transitionTo('queries.query.loading');
+
           }, (error) => {
             console.log('error', error);
             self.get('logger').danger('Failed to execute query.', self.extractError(error));
@@ -287,7 +297,7 @@ export default Ember.Route.extend(UILoggerMixin, {
       });
     },
 
-    getJobResult(data, payloadTitle){
+    getJobResult(data, payloadTitle, jobDetails){
       let self = this;
 
       let isVisualExplainQuery = this.get('controller').get('isVisualExplainQuery');
@@ -305,7 +315,9 @@ export default Ember.Route.extend(UILoggerMixin, {
         if(existingWorksheets.get('length') > 0) {
           myWs = existingWorksheets.filterBy('title', payloadTitle).get('firstObject');
         }
-
+        if(!Ember.isBlank(jobDetails.get("dagId"))) {
+          self.get('controller.model').set('tezUrl', self.get("tezViewInfo").getTezViewURL() + jobDetails.get("dagId"));
+        }
         myWs.set('queryResult', data);
         myWs.set('isQueryRunning', false);
         myWs.set('hasNext', data.hasNext);

http://git-wip-us.apache.org/repos/asf/ambari/blob/f080bd6c/contrib/views/hive20/src/main/resources/ui/app/services/tez-view-info.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/tez-view-info.js b/contrib/views/hive20/src/main/resources/ui/app/services/tez-view-info.js
new file mode 100644
index 0000000..943d002
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/tez-view-info.js
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Service.extend({
+  tezViewURL: null,
+  tezApiURL: '/api/v1/views/TEZ',
+  tezURLPrefix: '/views/TEZ',
+  tezDagPath: '?viewPath=/#/dag/',
+  getTezViewInfo: function () {
+    if (this.get('isTezViewAvailable')) {
+      return;
+    }
+
+    var self = this;
+    Ember.$.getJSON(this.get('tezApiURL'))
+      .then(function (response) {
+        self.getTezViewInstance(response);
+      })
+      .fail(function (response) {
+        self.setTezViewError(response);
+      });
+  },
+
+  getTezViewInstance: function (data) {
+    var self = this;
+    var url = this.get('tezApiURL') + '/versions/' + data.versions[0].ViewVersionInfo.version;
+
+    Ember.$.getJSON(url)
+      .then(function (response) {
+        if (!response.instances.length) {
+          self.setTezViewError(response);
+          return;
+        }
+
+        self.set('isTezViewAvailable', true);
+
+        var instance = response.instances[0].ViewInstanceInfo;
+        self.setTezViewURL(instance);
+      });
+  },
+
+  setTezViewURL: function (instance) {
+    var url = "%@/%@/%@/".fmt(
+      this.get('tezURLPrefix'),
+      instance.version,
+      instance.instance_name
+    );
+    this.set('tezViewURL', url);
+  },
+  setTezViewError: function (data) {
+    // status: 404 => Tev View isn't deployed
+    if (data.status && data.status === 404) {
+      this.set('error', 'tez.errors.not.deployed');
+      return;
+    }
+
+    // no instance created
+    if (data.instances && !data.instances.length) {
+      this.set('error', 'tez.errors.no.instance');
+      return;
+    }
+  },
+  getTezViewURL(){
+    return this.get('tezViewURL') + this.get("tezDagPath");
+  }
+});
\ No newline at end of file

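With the new service in place, a worksheet's tezUrl is the Tez view instance path followed by the DAG deep link. A hedged sketch of the resulting URL shape, using made-up version, instance and DAG id values:

  // tezViewURL = tezURLPrefix + version + instance_name, with illustrative values:
  var tezViewURL = '/views/TEZ/' + '0.7.0.2.6' + '/' + 'TEZ_CLUSTER_INSTANCE' + '/';
  var tezDagPath = '?viewPath=/#/dag/';
  var dagId = 'dag_1487760000000_0001_1';       // taken from the finished job's details
  var tezUrl = tezViewURL + tezDagPath + dagId;
  // => "/views/TEZ/0.7.0.2.6/TEZ_CLUSTER_INSTANCE/?viewPath=/#/dag/dag_1487760000000_0001_1"
  // which is what queries/query/tez-ui.hbs loads in the iframe once model.tezUrl is set.
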
http://git-wip-us.apache.org/repos/asf/ambari/blob/f080bd6c/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index 5371aa3..2824dc6 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -943,3 +943,8 @@ rect.operator__box {
   min-height: 100px;
   font-size: 16px;
 }
+
+.tez-ui {
+  width:100%;
+  height:600px;
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f080bd6c/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index bf485ca..4fd3ce201 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -131,4 +131,3 @@
   {{/modal-dialog}}
 {{/if}}
 
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/f080bd6c/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/tez-ui.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/tez-ui.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/tez-ui.hbs
index 9dfa3fa..9bf2b2a 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/tez-ui.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query/tez-ui.hbs
@@ -16,4 +16,8 @@
 * limitations under the License.
 }}
 
-<div>I am in tez-ui route.</div>
\ No newline at end of file
+{{#if model.tezUrl}}
+<iframe  class="tez-ui" src={{model.tezUrl}}></iframe>
+{{else}}
+<div>Tez UI not available.</div>
+{{/if}}


[08/50] ambari git commit: AMBARI-20057. "NodeManagers Status" value is empty in Yarn Summary page (alexantonenko)

Posted by nc...@apache.org.
AMBARI-20057. "NodeManagers Status" value is empty in Yarn Summary page (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bc4b8bce
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bc4b8bce
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bc4b8bce

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: bc4b8bcedb8f4de66c7e7ccf148653fcc53417f7
Parents: d5cbe19
Author: Alex Antonenko <hi...@gmail.com>
Authored: Wed Feb 22 04:33:34 2017 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Wed Feb 22 04:33:34 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/messages.js                         | 1 +
 ambari-web/app/views/main/service/services/yarn.js | 1 +
 2 files changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bc4b8bce/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 44bf13e..a06ff27 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2937,6 +2937,7 @@ Em.I18n.translations = {
   'dashboard.services.yarn.resourceManager.standby':'Standby ResourceManager',
   'dashboard.services.resourceManager.nodes.heap':'ResourceManager Heap',
   'dashboard.services.yarn.nodeManagers.status': 'NodeManagers Status',
+  'dashboard.services.yarn.nodeManagers.status.msg': '{0} active / {1} lost / {2} unhealthy / {3} rebooted / {4} decommissioned',
   'dashboard.services.yarn.nodeManagers.status.active': 'active',
   'dashboard.services.yarn.nodeManagers.status.lost': 'lost',
   'dashboard.services.yarn.nodeManagers.status.unhealthy': 'unhealthy',

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc4b8bce/ambari-web/app/views/main/service/services/yarn.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/services/yarn.js b/ambari-web/app/views/main/service/services/yarn.js
index 84ba520..11edd22 100644
--- a/ambari-web/app/views/main/service/services/yarn.js
+++ b/ambari-web/app/views/main/service/services/yarn.js
@@ -55,6 +55,7 @@ App.MainDashboardServiceYARNView = App.MainDashboardServiceView.extend({
   _nmUnhealthy: Em.computed.formatUnavailable('service.nodeManagersCountUnhealthy'),
   _nmRebooted: Em.computed.formatUnavailable('service.nodeManagersCountRebooted'),
   _nmDecom: Em.computed.formatUnavailable('service.nodeManagersCountDecommissioned'),
+  nodeManagersStatus: Em.computed.i18nFormat('dashboard.services.yarn.nodeManagers.status.msg', '_nmActive', '_nmLost', '_nmUnhealthy', '_nmRebooted', '_nmDecom'),
 
   _allocated: Em.computed.formatUnavailable('service.containersAllocated'),
   _pending: Em.computed.formatUnavailable('service.containersPending'),

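The new nodeManagersStatus property fills the {0}..{4} placeholders of the added message key with the five counters. An illustrative stand-in for that positional formatting in plain JavaScript (not the ambari-web i18nFormat helper itself):

  var msg = '{0} active / {1} lost / {2} unhealthy / {3} rebooted / {4} decommissioned';
  function formatStatus(active, lost, unhealthy, rebooted, decom) {
    var args = [active, lost, unhealthy, rebooted, decom];
    return msg.replace(/\{(\d)\}/g, function (match, i) { return args[i]; });
  }
  // formatStatus(10, 0, 1, 0, 2) => "10 active / 0 lost / 1 unhealthy / 0 rebooted / 2 decommissioned"
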

[27/50] ambari git commit: AMBARI-15754. configs.sh expands ***** in config values to a local file list, causing broken config files (Asger Askov Blekinge via alejandro)

Posted by nc...@apache.org.
AMBARI-15754. configs.sh expands ***** in config values to a local file list, causing broken config files (Asger Askov Blekinge via alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e9c94390
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e9c94390
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e9c94390

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: e9c943905721ef431dc09b45abaf4a05312d4fe5
Parents: f7acf8e
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Wed Feb 22 16:03:40 2017 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Wed Feb 22 17:21:13 2017 -0800

----------------------------------------------------------------------
 ambari-server/src/main/resources/scripts/configs.sh | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e9c94390/ambari-server/src/main/resources/scripts/configs.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/configs.sh b/ambari-server/src/main/resources/scripts/configs.sh
index da88520..eeb53c5 100755
--- a/ambari-server/src/main/resources/scripts/configs.sh
+++ b/ambari-server/src/main/resources/scripts/configs.sh
@@ -168,7 +168,7 @@ doConfigUpdate () {
           finalJson="{ \"Clusters\": { \"desired_config\": {\"type\": \"$SITE\", \"tag\":\"$newTag\", $newProperties}}}"
           newFile="doSet_$newTag.json"
           echo "########## Putting json into: $newFile"
-          echo $finalJson > $newFile
+          echo "$finalJson" > $newFile
           curl -k -u $USERID:$PASSWD -X PUT -H "X-Requested-By: ambari" "$AMBARIURL/api/v1/clusters/$CLUSTER" --data @$newFile
           currentSiteTag
           echo "########## NEW Site:$SITE, Tag:$SITETAG";
@@ -193,7 +193,7 @@ doConfigFileUpdate () {
       newProperties=`cat $FILENAME`;
       finalJson="{ \"Clusters\": { \"desired_config\": {\"type\": \"$SITE\", \"tag\":\"$newTag\", $newProperties}}}"
       newFile="doSet_$newTag.json"
-      echo $finalJson>$newFile
+      echo "$finalJson" > $newFile
       echo "########## PUTting file:\"$FILENAME\" into config(type:\"$SITE\", tag:$newTag) via $newFile"
       curl -k -u $USERID:$PASSWD -X PUT -H "X-Requested-By: ambari" "$AMBARIURL/api/v1/clusters/$CLUSTER" --data @$newFile
       currentSiteTag
@@ -240,9 +240,9 @@ doGet () {
     fi
     if [ "$propertiesStarted" -gt "0" ]; then
       if [ -z $FILENAME ]; then
-        echo $line
+        echo "$line"
       else
-        echo $line >> $FILENAME
+        echo "$line" >> $FILENAME
       fi
     fi
     if [ "`echo $line | grep -E "{$"`" ]; then


[33/50] ambari git commit: AMBARI-20133. Changes/Overwrites to Bundles and Coordinators Show as Workflow, Not able to select text from workflow 'Definition' tab (Venkata Sairam via pallavkul)

Posted by nc...@apache.org.
AMBARI-20133. Changes/Overwrites to Bundles and Coordinators Show as Workflow, Not able to select text from workflow 'Definition' tab (Venkata Sairam via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9c6c20f2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9c6c20f2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9c6c20f2

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 9c6c20f23dc077d52add8cef1ac70c6add89b3f7
Parents: 82230d6
Author: pallavkul <pa...@gmail.com>
Authored: Thu Feb 23 12:41:43 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Thu Feb 23 12:41:43 2017 +0530

----------------------------------------------------------------------
 .../wfmanager/src/main/resources/ui/app/components/save-wf.js   | 2 +-
 .../views/wfmanager/src/main/resources/ui/app/styles/app.less   | 5 +++--
 .../src/main/resources/ui/app/templates/components/save-wf.hbs  | 4 ++--
 3 files changed, 6 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9c6c20f2/contrib/views/wfmanager/src/main/resources/ui/app/components/save-wf.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/save-wf.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/save-wf.js
index 17ee0fd..01da55b 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/save-wf.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/save-wf.js
@@ -97,7 +97,7 @@ export default Ember.Component.extend(Validations, {
     this.get("saveJobService").saveWorkflow(url, workflowData).promise.then(function(response){
         self.showNotification({
           "type": "success",
-          "message": "Workflow have been saved"
+          "message": this.get("displayName")+" have been saved"
         });
         self.set("savingInProgress",false);
         this.set('jobFilePath', this.get('filePath'));

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c6c20f2/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
index 86c1dd3..e98d182 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
@@ -850,12 +850,10 @@ height: 100vh;
 
 .preview pre.prettyprint {
   border: none;
-  background: #fff;
 }
 
 .preview pre {
   border: none;
-  background: #fff;
 }
 
 .date-picker {
@@ -1758,3 +1756,6 @@ height: 100vh;
  .CodeMirror{
    height: auto;
  }
+ pre {
+   background-color : white;
+ }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c6c20f2/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/save-wf.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/save-wf.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/save-wf.hbs
index aee2072..d6937ed 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/save-wf.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/save-wf.hbs
@@ -20,7 +20,7 @@
     <div class="modal-content">
       <div class="modal-header">
         <button type="button" class="close" data-dismiss="modal">&times;</button>
-        <h4 class="modal-title">Save Workflow</h4>
+        <h4 class="modal-title">Save {{displayName}}</h4>
       </div>
       <div class="modal-body">
         <div class="panel panel-default">
@@ -41,7 +41,7 @@
             </div>
             {{/if}}
             <div class="row form-group">
-              <div class="col-xs-3"> 
+              <div class="col-xs-3">
                 <label class="control-label" for="{{type}}-path">{{displayName}} path</label><span class="requiredField">&nbsp;*</span>
               </div>
               <div class="col-xs-8">


[36/50] ambari git commit: AMBARI-19797. ambari-server SuSE11 init script not working in certain environments (aonishuk)

Posted by nc...@apache.org.
AMBARI-19797. ambari-server SuSE11 init script not working in certain environments (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aeb5d4e5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aeb5d4e5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aeb5d4e5

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: aeb5d4e550cecd7ee93aa968669c5e54ae2226dc
Parents: 83cdcea
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Feb 23 13:37:50 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Feb 23 13:37:50 2017 +0200

----------------------------------------------------------------------
 ambari-agent/etc/init.d/ambari-agent | 1 +
 ambari-server/sbin/ambari-server     | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aeb5d4e5/ambari-agent/etc/init.d/ambari-agent
----------------------------------------------------------------------
diff --git a/ambari-agent/etc/init.d/ambari-agent b/ambari-agent/etc/init.d/ambari-agent
index 2d597a9..044f86d 100644
--- a/ambari-agent/etc/init.d/ambari-agent
+++ b/ambari-agent/etc/init.d/ambari-agent
@@ -21,6 +21,7 @@
 
 ### BEGIN INIT INFO
 # Provides:          ambari-agent
+# Required-Start:    $local_fs $remote_fs $network
 # Required-Start:
 # Required-Stop:
 # Default-Start:     2 3 4 5

http://git-wip-us.apache.org/repos/asf/ambari/blob/aeb5d4e5/ambari-server/sbin/ambari-server
----------------------------------------------------------------------
diff --git a/ambari-server/sbin/ambari-server b/ambari-server/sbin/ambari-server
index 205f272..67e8aea 100755
--- a/ambari-server/sbin/ambari-server
+++ b/ambari-server/sbin/ambari-server
@@ -21,7 +21,7 @@
 
 ### BEGIN INIT INFO
 # Provides:          ambari-server
-# Required-Start:    $ALL
+# Required-Start:    $local_fs $remote_fs $network
 # Required-Start:
 # Required-Stop:
 # Default-Start:     2 3 4 5


[46/50] ambari git commit: AMBARI-2003 LogFeeder Simulator Enhancements for 3000-node cluster testing (mgergely)

Posted by nc...@apache.org.
AMBARI-2003 LogFeeder Simulator Enhancements for 3000-node cluster testing (mgergely)

Change-Id: I828c1804b4b118e2535da8c50ae0f4e7fc5798ce


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b68bb74c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b68bb74c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b68bb74c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b68bb74c47ee4a824d1b9d4fc243db31d185eb41
Parents: 9fdeec1
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Thu Feb 23 15:08:33 2017 +0100
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Thu Feb 23 15:08:33 2017 +0100

----------------------------------------------------------------------
 .../org/apache/ambari/logfeeder/LogFeeder.java  | 13 ++++++--
 .../ambari/logfeeder/common/ConfigBlock.java    |  2 +-
 .../apache/ambari/logfeeder/filter/Filter.java  |  4 +--
 .../ambari/logfeeder/input/InputSimulate.java   | 31 ++++++++++++++++++--
 4 files changed, 42 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b68bb74c/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
index 24651ba..d584890 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
@@ -245,7 +245,7 @@ public class LogFeeder {
 
       // We will only check for is_enabled out here. Down below we will check whether this output is enabled for the input
       if (output.getBooleanValue("is_enabled", true)) {
-        output.logConfgs(Level.INFO);
+        output.logConfigs(Level.INFO);
         outputManager.add(output);
       } else {
         LOG.info("Output is disabled. So ignoring it. " + output.getShortDescription());
@@ -277,7 +277,7 @@ public class LogFeeder {
         input.setOutputManager(outputManager);
         input.setInputManager(inputManager);
         inputManager.add(input);
-        input.logConfgs(Level.INFO);
+        input.logConfigs(Level.INFO);
       } else {
         LOG.info("Input is disabled. So ignoring it. " + input.getShortDescription());
       }
@@ -311,7 +311,7 @@ public class LogFeeder {
         if (filter.isEnabled()) {
           filter.setOutputManager(outputManager);
           input.addFilter(filter);
-          filter.logConfgs(Level.INFO);
+          filter.logConfigs(Level.INFO);
         } else {
           LOG.debug("Ignoring filter " + filter.getShortDescription() + " for input " + input.getShortDescription());
         }
@@ -371,6 +371,13 @@ public class LogFeeder {
         }
       }
     }
+    
+    // In case of simulation copies of the output are added for each simulation instance, these must be added to the manager
+    for (Output output : InputSimulate.getSimulateOutputs()) {
+      outputManager.add(output);
+      usedOutputSet.add(output);
+    }
+    
     outputManager.retainUsedOutputs(usedOutputSet);
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b68bb74c/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
index 47ddc51..68897e8 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
@@ -223,7 +223,7 @@ public abstract class ConfigBlock {
     logStatForMetric(statMetric, "Stat");
   }
 
-  public boolean logConfgs(Priority level) {
+  public boolean logConfigs(Priority level) {
     if (level.toInt() == Priority.INFO_INT && !LOG.isInfoEnabled()) {
       return false;
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b68bb74c/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
index 684f3c4..afd903e 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
@@ -173,8 +173,8 @@ public abstract class Filter extends ConfigBlock {
   }
 
   @Override
-  public boolean logConfgs(Priority level) {
-    if (!super.logConfgs(level)) {
+  public boolean logConfigs(Priority level) {
+    if (!super.logConfigs(level)) {
       return false;
     }
     LOG.log(level, "input=" + input.getShortDescription());

http://git-wip-us.apache.org/repos/asf/ambari/blob/b68bb74c/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
index be97a52..2222f93 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
@@ -29,17 +29,21 @@ import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 import java.util.TreeSet;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.filter.FilterJSON;
+import org.apache.ambari.logfeeder.output.Output;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.log4j.Logger;
 import org.apache.solr.common.util.Base64;
 
 import com.google.common.base.Joiner;
 
 public class InputSimulate extends Input {
+  private static final Logger LOG = Logger.getLogger(InputSimulate.class);
 
-  private static final String LOG_TEXT_PATTERN = "{ logtime=\"%d\", level=\"%s\", log_message=\"%s\"}";
+  private static final String LOG_TEXT_PATTERN = "{ logtime=\"%d\", level=\"%s\", log_message=\"%s\", host=\"%s\"}";
   
   private static final Map<String, String> typeToFilePath = new HashMap<>();
   public static void loadTypeToFilePath(List<Map<String, Object>> inputList) {
@@ -52,6 +56,13 @@ public class InputSimulate extends Input {
   
   private static final Map<String, Integer> typeToLineNumber = new HashMap<>();
   
+  private static final AtomicInteger hostNumber = new AtomicInteger(0);
+  
+  private static final List<Output> simulateOutputs = new ArrayList<>();
+  public static List<Output> getSimulateOutputs() {
+    return simulateOutputs;
+  }
+  
   private final Random random = new Random(System.currentTimeMillis());
   
   private final List<String> types;
@@ -60,6 +71,7 @@ public class InputSimulate extends Input {
   private final int minLogWords;
   private final int maxLogWords;
   private final long sleepMillis;
+  private final String host;
   
   public InputSimulate() throws Exception {
     this.types = getSimulatedLogTypes();
@@ -68,6 +80,7 @@ public class InputSimulate extends Input {
     this.minLogWords = LogFeederUtil.getIntProperty("logfeeder.simulate.min_log_words", 5, 1, 10);
     this.maxLogWords = LogFeederUtil.getIntProperty("logfeeder.simulate.max_log_words", 10, 10, 20);
     this.sleepMillis = LogFeederUtil.getIntProperty("logfeeder.simulate.sleep_milliseconds", 10000);
+    this.host = "#" + hostNumber.incrementAndGet() + "-" + LogFeederUtil.hostName;
     
     Filter filter = new FilterJSON();
     filter.loadConfig(Collections.<String, Object> emptyMap());
@@ -87,6 +100,20 @@ public class InputSimulate extends Input {
   }
 
   @Override
+  public void addOutput(Output output) {
+    try {
+      Class<? extends Output> clazz = output.getClass();
+      Output outputCopy = clazz.newInstance();
+      outputCopy.loadConfig(output.getConfigs());
+      simulateOutputs.add(outputCopy);
+      super.addOutput(outputCopy);
+    } catch (Exception e) {
+      LOG.warn("Could not copy Output class " + output.getClass() + ", using original output");
+      super.addOutput(output);
+    }
+  }
+
+  @Override
   public boolean isReady() {
     return true;
   }
@@ -143,7 +170,7 @@ public class InputSimulate extends Input {
   private String getLine() {
     Date d = new Date();
     String logMessage = createLogMessage();
-    return String.format(LOG_TEXT_PATTERN, d.getTime(), level, logMessage);
+    return String.format(LOG_TEXT_PATTERN, d.getTime(), level, logMessage, host);
   }
   
   private String createLogMessage() {


[44/50] ambari git commit: AMBARI-20085 Confusing AMS collector heap size validation loop (dsen)

Posted by nc...@apache.org.
AMBARI-20085 Confusing AMS collector heap size validation loop (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bb7b83f2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bb7b83f2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bb7b83f2

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: bb7b83f2bb1b6af2dad38e98ffdcddda13e1cb2b
Parents: 986e7a9
Author: Dmytro Sen <ds...@apache.org>
Authored: Thu Feb 23 15:52:02 2017 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Thu Feb 23 15:52:21 2017 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.0.6/services/stack_advisor.py      |  1 +
 .../stacks/HDP/2.5/services/stack_advisor.py        |  8 ++++++++
 .../src/main/resources/stacks/stack_advisor.py      | 12 ++++++------
 .../python/stacks/2.5/common/test_stack_advisor.py  | 16 ++++++++++++++++
 .../src/test/resources/stacks/old_stack_advisor.py  | 12 ++++++------
 .../stacks/HDF/2.0/services/stack_advisor.py        |  2 +-
 6 files changed, 38 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bb7b83f2/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 5c68f15..61960bb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -1150,6 +1150,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
           if unusedMemory > 4*gb and collector_needs_increase:  # warn user, if more than 4GB RAM is unused
             recommended_collector_heapsize = int((unusedMemory - 4*gb)/5) + collector_heapsize * mb
+            recommended_collector_heapsize = min(16*gb, recommended_collector_heapsize) #Make sure heapsize <= 16GB
             recommended_collector_heapsize = round_to_n(recommended_collector_heapsize/mb,128) # Round to 128m multiple
             if collector_heapsize < recommended_collector_heapsize:
               validation_msg = "Consider allocating {0} MB to metrics_collector_heapsize in ams-env to use up some " \

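The added min() caps the advisor's suggestion, so the rule becomes: take a fifth of the RAM left over beyond a 4 GB reserve, add it to the current heap, clamp at 16 GB, and round to a 128 MB multiple. A small sketch of that arithmetic in plain JavaScript (names are illustrative, and round-to-nearest is assumed for round_to_n):

  var MB = 1024 * 1024, GB = 1024 * MB;
  function recommendCollectorHeapMb(unusedMemoryBytes, currentHeapMb) {
    var rec = Math.floor((unusedMemoryBytes - 4 * GB) / 5) + currentHeapMb * MB; // bytes
    rec = Math.min(16 * GB, rec);                // new cap: never suggest more than 16 GB
    return Math.round(rec / MB / 128) * 128;     // round to a 128 MB multiple
  }
  // e.g. 40 GB unused with a 512 MB heap today: recommendCollectorHeapMb(40 * GB, 512) => 7936 (MB)
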
http://git-wip-us.apache.org/repos/asf/ambari/blob/bb7b83f2/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 52ada52..6f3dfa7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -142,6 +142,14 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
 
     return self.toConfigurationValidationProblems(validationItems, "storm-site")
 
+  def getCardinalitiesDict(self, hosts):
+    result = super(HDP25StackAdvisor, self).getCardinalitiesDict(hosts)
+    min_val = 1
+    if len(hosts["items"]) > 999:
+      min_val = 2
+    result['METRICS_COLLECTOR'] = {"min": min_val}
+    return result
+
   def validateAtlasConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
     application_properties = self.getSiteProperties(configurations, "application-properties")
     validationItems = []

http://git-wip-us.apache.org/repos/asf/ambari/blob/bb7b83f2/ambari-server/src/main/resources/stacks/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/stack_advisor.py b/ambari-server/src/main/resources/stacks/stack_advisor.py
index 04c6baf..0b81700 100644
--- a/ambari-server/src/main/resources/stacks/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/stack_advisor.py
@@ -806,7 +806,7 @@ class DefaultStackAdvisor(StackAdvisor):
       return component["StackServiceComponents"]["hostnames"]
 
     if len(hostsList) > 1 and self.isMasterComponentWithMultipleInstances(component):
-      hostsCount = self.getMinComponentCount(component)
+      hostsCount = self.getMinComponentCount(component, hosts)
       if hostsCount > 1: # get first 'hostsCount' available hosts
         hostsForComponent = []
         hostIndex = 0
@@ -1460,13 +1460,13 @@ class DefaultStackAdvisor(StackAdvisor):
     service = self.getNotValuableComponents()
     return componentName in service
 
-  def getMinComponentCount(self, component):
+  def getMinComponentCount(self, component, hosts):
     componentName = self.getComponentName(component)
-    return self.getComponentCardinality(componentName)["min"]
+    return self.getComponentCardinality(componentName, hosts)["min"]
 
   # Helper dictionaries
-  def getComponentCardinality(self, componentName):
-    dict = self.getCardinalitiesDict()
+  def getComponentCardinality(self, componentName, hosts):
+    dict = self.getCardinalitiesDict(hosts)
     if componentName in dict:
       return dict[componentName]
     else:
@@ -1508,7 +1508,7 @@ class DefaultStackAdvisor(StackAdvisor):
   def getNotPreferableOnServerComponents(self):
     return self.notPreferableOnServerComponents
 
-  def getCardinalitiesDict(self):
+  def getCardinalitiesDict(self, hosts):
     return self.cardinalitiesDict
 
   def getComponentLayoutSchemes(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/bb7b83f2/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index 6890ef6..cf1c0ee 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -404,6 +404,12 @@ class TestHDP25StackAdvisor(TestCase):
       data = json.load(f)
     return data
 
+  def prepareNHosts(self, host_count):
+    names = []
+    for i in range(0, host_count):
+      names.append("hostname" + str(i))
+    return self.prepareHosts(names)
+
   def prepareHosts(self, hostsNames):
     hosts = { "items": [] }
     for hostName in hostsNames:
@@ -435,6 +441,16 @@ class TestHDP25StackAdvisor(TestCase):
   def __getHosts(self, componentsList, componentName):
     return [component["StackServiceComponents"] for component in componentsList if component["StackServiceComponents"]["component_name"] == componentName][0]
 
+  def test_getCardinalitiesDict(self):
+    hosts = self.prepareNHosts(5)
+    actual = self.stackAdvisor.getCardinalitiesDict(hosts)
+    expected = {'METRICS_COLLECTOR': {'min': 1}}
+    self.assertEquals(actual, expected)
+
+    hosts = self.prepareNHosts(1001)
+    actual = self.stackAdvisor.getCardinalitiesDict(hosts)
+    expected = {'METRICS_COLLECTOR': {'min': 2}}
+    self.assertEquals(actual, expected)
 
   def test_getComponentLayoutValidations_one_hsi_host(self):
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/bb7b83f2/ambari-server/src/test/resources/stacks/old_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/old_stack_advisor.py b/ambari-server/src/test/resources/stacks/old_stack_advisor.py
index 8a880e0..ee5674b 100644
--- a/ambari-server/src/test/resources/stacks/old_stack_advisor.py
+++ b/ambari-server/src/test/resources/stacks/old_stack_advisor.py
@@ -359,7 +359,7 @@ class DefaultStackAdvisor(StackAdvisor):
             availableHosts = [hostName for hostName in hostsList if not self.isLocalHost(hostName)]
 
           if self.isMasterComponentWithMultipleInstances(component):
-            hostsCount = self.getMinComponentCount(component)
+            hostsCount = self.getMinComponentCount(component, hosts)
             if hostsCount > 1: # get first 'hostsCount' available hosts
               if len(availableHosts) < hostsCount:
                 hostsCount = len(availableHosts)
@@ -524,13 +524,13 @@ class DefaultStackAdvisor(StackAdvisor):
     service = self.getNotValuableComponents()
     return componentName in service
 
-  def getMinComponentCount(self, component):
+  def getMinComponentCount(self, component, hosts):
     componentName = self.getComponentName(component)
-    return self.getComponentCardinality(componentName)["min"]
+    return self.getComponentCardinality(componentName, hosts)["min"]
 
   # Helper dictionaries
-  def getComponentCardinality(self, componentName):
-    return self.getCardinalitiesDict().get(componentName, {"min": 1, "max": 1})
+  def getComponentCardinality(self, componentName, hosts):
+    return self.getCardinalitiesDict(hosts).get(componentName, {"min": 1, "max": 1})
 
   def getHostForComponent(self, component, hostsList):
     componentName = self.getComponentName(component)
@@ -568,7 +568,7 @@ class DefaultStackAdvisor(StackAdvisor):
   def getNotPreferableOnServerComponents(self):
     return []
 
-  def getCardinalitiesDict(self):
+  def getCardinalitiesDict(self, hosts):
     return {}
 
   def getComponentLayoutSchemes(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/bb7b83f2/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/services/stack_advisor.py b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/services/stack_advisor.py
index 40cc847..da33b95 100644
--- a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/services/stack_advisor.py
+++ b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/services/stack_advisor.py
@@ -1615,7 +1615,7 @@ class HDF20StackAdvisor(DefaultStackAdvisor):
   def getNotPreferableOnServerComponents(self):
     return ['STORM_UI_SERVER', 'DRPC_SERVER', 'STORM_REST_API', 'NIMBUS', 'METRICS_COLLECTOR']
 
-  def getCardinalitiesDict(self):
+  def getCardinalitiesDict(self, hosts):
     return {
       'ZOOKEEPER_SERVER': {"min": 3},
       'METRICS_COLLECTOR': {"min": 1}


[25/50] ambari git commit: AMBARI-20118 Test and fix new modals styles on Ambari. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-20118 Test and fix new modals styles on Ambari. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0b6d1cd3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0b6d1cd3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0b6d1cd3

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 0b6d1cd3b311389e8d385ab782ae78a65b14ef8b
Parents: a6f3420
Author: ababiichuk <ab...@hortonworks.com>
Authored: Wed Feb 22 20:19:25 2017 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Wed Feb 22 20:19:25 2017 +0200

----------------------------------------------------------------------
 .../main/admin/stack_and_upgrade_controller.js  |  1 -
 .../wizard/step7/assign_master_controller.js    | 13 +++-----
 .../app/controllers/wizard/step9_controller.js  |  7 ++---
 ambari-web/app/routes/main.js                   |  4 +--
 .../app/styles/enhanced_service_dashboard.less  |  1 -
 ambari-web/app/styles/modal_popups.less         | 11 ++-----
 ambari-web/app/styles/stack_versions.less       |  2 +-
 .../app/styles/theme/bootstrap-ambari.css       |  6 ++--
 .../common/assign_master_components.hbs         | 33 ++++++++++----------
 .../configs/propertyDependence_footer.hbs       | 25 ---------------
 ambari-web/app/templates/common/modal_popup.hbs | 26 +++++++++------
 ambari-web/app/templates/common/progress.hbs    | 16 +++++-----
 .../main/service/info/save_popup_footer.hbs     | 13 ++++++--
 .../step3/step3_host_warning_popup_footer.hbs   | 14 +++++----
 .../common/assign_master_components_view.js     |  6 ++--
 .../configs/service_configs_by_category_view.js | 15 +++++----
 .../views/main/admin/kerberos/disable_view.js   |  2 +-
 .../views/wizard/step7/assign_master_view.js    |  6 +---
 18 files changed, 86 insertions(+), 115 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index cb02665..35b2ecc 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -1022,7 +1022,6 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
       secondary: function() {
         return this.get('controller.getSupportedUpgradeError') ? null : Em.I18n.t('common.cancel');
       }.property('controller.getSupportedUpgradeError'),
-      primaryClass: 'btn-success',
       classNames: ['upgrade-options-popup'],
       header: Em.I18n.t('admin.stackVersions.version.upgrade.upgradeOptions.header'),
       controller: this,

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/controllers/wizard/step7/assign_master_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7/assign_master_controller.js b/ambari-web/app/controllers/wizard/step7/assign_master_controller.js
index 81c80e1..e635f41 100644
--- a/ambari-web/app/controllers/wizard/step7/assign_master_controller.js
+++ b/ambari-web/app/controllers/wizard/step7/assign_master_controller.js
@@ -117,8 +117,10 @@ App.AssignMasterOnStep7Controller = Em.Controller.extend(App.BlueprintMixin, App
       bodyClass: App.AssignMasterOnStep7View.extend({
         controller: self
       }),
-      primary: Em.I18n.t('form.cancel'),
-      showFooter: false,
+      primary: Em.I18n.t('common.select'),
+      onSecondary: function() {
+        this.showWarningPopup();
+      },
       onClose: function () {
         this.showWarningPopup();
       },
@@ -402,12 +404,5 @@ App.AssignMasterOnStep7Controller = Em.Controller.extend(App.BlueprintMixin, App
       configActionComponent.hostName = componentHostName;
       self.get('configWidgetContext.config').set('configActionComponent', configActionComponent);
     });
-  },
-
-  /**
-   * function called for onclcik event on cancel button for the popup
-   */
-  onCancel: function() {
-    this.get('popup').showWarningPopup();
   }
 });
\ No newline at end of file
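
The net effect of the hunk above is that the step-7 Assign Master popup now uses the shared modal footer instead of suppressing it: Select becomes the primary action, while dismissing the popup (secondary/Cancel or the close icon) funnels into the existing warning popup. A condensed sketch of the resulting popup options (App.ModalPopup.show is assumed, as elsewhere in ambari-web; showWarningPopup is the helper already defined on this popup, and self is the controller as in the hunk):

App.ModalPopup.show({
  bodyClass: App.AssignMasterOnStep7View.extend({ controller: self }),
  primary: Em.I18n.t('common.select'),
  // Cancel (secondary) and the close icon both ask for confirmation
  // before discarding the user's selection.
  onSecondary: function () {
    this.showWarningPopup();
  },
  onClose: function () {
    this.showWarningPopup();
  }
});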

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/controllers/wizard/step9_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step9_controller.js b/ambari-web/app/controllers/wizard/step9_controller.js
index f63bd79..a009334 100644
--- a/ambari-web/app/controllers/wizard/step9_controller.js
+++ b/ambari-web/app/controllers/wizard/step9_controller.js
@@ -606,11 +606,8 @@ App.WizardStep9Controller = Em.Controller.extend(App.ReloadPopupMixin, {
       encodeBody: false,
       primary: Em.I18n.t('ok'),
       header: Em.I18n.t('installer.step9.service.start.header'),
-      secondaryClass: "hide",
-      body: Em.I18n.t('installer.step9.service.start.failed'),
-      primaryClass: 'btn-success',
-      onPrimary: function() { this.hide(); },
-      onClose: function() { this.hide(); }
+      secondary: false,
+      body: Em.I18n.t('installer.step9.service.start.failed')
     });
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/routes/main.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js
index 14435e8..9730de2 100644
--- a/ambari-web/app/routes/main.js
+++ b/ambari-web/app/routes/main.js
@@ -429,9 +429,9 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
               bodyClass: App.KerberosDisableView.extend({
                 controllerBinding: 'App.router.kerberosDisableController'
               }),
-              primary: Em.I18n.t('form.cancel'),
+              primary: Em.I18n.t('common.complete'),
               secondary: null,
-              showFooter: false,
+              disablePrimary: Em.computed.alias('App.router.kerberosDisableController.isSubmitDisabled'),
 
               onClose: function () {
                 var self = this;

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/styles/enhanced_service_dashboard.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/enhanced_service_dashboard.less b/ambari-web/app/styles/enhanced_service_dashboard.less
index dfbe042..1dd645b 100644
--- a/ambari-web/app/styles/enhanced_service_dashboard.less
+++ b/ambari-web/app/styles/enhanced_service_dashboard.less
@@ -522,7 +522,6 @@
 
 .widgets-browser-popup {
   .modal {
-    position: fixed;
     .modal-body {
       padding-top: 0;
       min-height: 290px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/styles/modal_popups.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/modal_popups.less b/ambari-web/app/styles/modal_popups.less
index a6a0196..e269791 100644
--- a/ambari-web/app/styles/modal_popups.less
+++ b/ambari-web/app/styles/modal_popups.less
@@ -77,10 +77,6 @@
   overflow-y: auto;
 }
 
-.modal-header h4 {
-  margin: 0;
-}
-
 /*90% width wizard modal window start*/
 .wizard-modal-wrapper {
   .modal {
@@ -202,7 +198,6 @@
   .modal-footer {
     .footer-checkbox {
       text-align: left;
-      padding-top: 11px;
       margin-left: 22px;
     }
   }
@@ -395,14 +390,14 @@
 
 .modal-graph-line {
   .modal-dialog {
-    width: 810px;
+    width: 820px;
   }
   .modal-body {
     min-height: 450px !important;
     overflow: hidden;
     .corner-icon {
       position: absolute;
-      right: 15px;
+      right: 20px;
       text-decoration: none;
       .glyphicon-save {
         color: #555;
@@ -410,7 +405,7 @@
     }
     .export-graph-list {
       top: auto;
-      right: 15px;
+      right: 20px;
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/styles/stack_versions.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/stack_versions.less b/ambari-web/app/styles/stack_versions.less
index 0e1d2d0..2d5c2b4 100644
--- a/ambari-web/app/styles/stack_versions.less
+++ b/ambari-web/app/styles/stack_versions.less
@@ -404,7 +404,7 @@
     position: absolute;
     cursor: pointer;
     right: 10%;
-    top: 13px;
+    top: 22px;
     width: 100px;
     a {
       font-size: 13px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/styles/theme/bootstrap-ambari.css
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/theme/bootstrap-ambari.css b/ambari-web/app/styles/theme/bootstrap-ambari.css
index 2c84f88..d88a1ed 100644
--- a/ambari-web/app/styles/theme/bootstrap-ambari.css
+++ b/ambari-web/app/styles/theme/bootstrap-ambari.css
@@ -1323,11 +1323,13 @@ input.radio:checked + label:after {
 .modal .modal-content .modal-header {
   border-bottom: none;
   padding-top: 20px;
+  color: #666;
+  font-size: 20px;
 }
 .modal .modal-content .modal-header h4 {
   margin: 0;
-  color: #666;
-  font-size: 20px;
+  color: inherit;
+  font-size: inherit;
 }
 .modal .modal-content .modal-body {
   color: #666;

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/templates/common/assign_master_components.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/assign_master_components.hbs b/ambari-web/app/templates/common/assign_master_components.hbs
index d5eaf75..72568a4 100644
--- a/ambari-web/app/templates/common/assign_master_components.hbs
+++ b/ambari-web/app/templates/common/assign_master_components.hbs
@@ -160,24 +160,23 @@
   </div>
 </div>
 
-<div class="wizard-footer col-md-12">
-  <div class="btn-area">
-    {{#if view.isBackButtonVisible}}
-      <button type="button" class="btn btn-default pull-left installer-back-btn" {{bindAttr disabled="App.router.btnClickInProgress"}} {{action back}}>
-        &larr; {{t common.back}}
-        {{#if App.router.backBtnClickInProgress}}
+{{#if view.isWizardStep}}
+  <div class="wizard-footer col-md-12">
+    <div class="btn-area">
+      {{#if view.isBackButtonVisible}}
+        <button type="button" class="btn btn-default pull-left installer-back-btn" {{bindAttr disabled="App.router.btnClickInProgress"}} {{action back}}>
+          &larr; {{t common.back}}
+          {{#if App.router.backBtnClickInProgress}}
+            {{view App.SpinnerView tagName="span" classNames="service-button-spinner"}}
+          {{/if}}
+        </button>
+      {{/if}}
+      <button type="button" class="btn btn-success pull-right" {{bindAttr disabled="nextButtonDisabled"}} {{action submit target="controller"}}>
+        {{#if App.router.nextBtnClickInProgress}}
           {{view App.SpinnerView tagName="span" classNames="service-button-spinner"}}
         {{/if}}
+        {{t common.next}} &rarr;
       </button>
-    {{/if}}
-    <button type="button" class="btn btn-success pull-right" {{bindAttr disabled="nextButtonDisabled"}} {{action submit target="controller"}}>
-      {{#if App.router.nextBtnClickInProgress}}
-        {{view App.SpinnerView tagName="span" classNames="service-button-spinner"}}
-      {{/if}}
-      {{view.acceptButtonText}}
-    </button>
-    {{#if view.isCancelButtonVisible}}
-      <button class="btn btn-default pull-right mrm" {{action onCancel target="controller"}}>{{t common.cancel}}</button>
-    {{/if}}
+    </div>
   </div>
-</div>
+{{/if}}
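
Together with the view changes further down in this commit, the {{#if view.isWizardStep}} guard above replaces the old per-view cancel/accept-button switches with a single flag. Condensed, the two views end up roughly as follows (only the properties touched by this commit are shown):

App.AssignMasterComponentsView = Em.View.extend({
  // Wizard steps keep rendering their own Back / Next footer.
  isWizardStep: true,
  isBackButtonVisible: true
});

App.AssignMasterOnStep7View = App.AssignMasterComponentsView.extend({
  // Inside the step-7 popup the shared modal footer is used instead,
  // so the wizard footer block in the template above is skipped.
  isWizardStep: false
});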

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/templates/common/configs/propertyDependence_footer.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/configs/propertyDependence_footer.hbs b/ambari-web/app/templates/common/configs/propertyDependence_footer.hbs
deleted file mode 100644
index ee03ad4..0000000
--- a/ambari-web/app/templates/common/configs/propertyDependence_footer.hbs
+++ /dev/null
@@ -1,25 +0,0 @@
-{{!
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-}}
-
-<div class="pull-right">
-  <button class="btn btn-default" {{action onUndo target="view.parentView"}}>{{t common.cancel}}</button>
-  {{#if view.canIgnore}}
-    <button class="btn btn-warning" {{action onIgnore target="view.parentView"}}>{{t common.ignore}}</button>
-  {{/if}}
-  <button class="btn btn-success" {{action onApply target="view.parentView"}}>{{t common.apply}}</button>
-</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/templates/common/modal_popup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/modal_popup.hbs b/ambari-web/app/templates/common/modal_popup.hbs
index 59e9c0b..f805ec6 100644
--- a/ambari-web/app/templates/common/modal_popup.hbs
+++ b/ambari-web/app/templates/common/modal_popup.hbs
@@ -24,7 +24,7 @@
       {{! Modal header }}
       <div class="modal-header">
         {{#if view.showCloseButton}}
-          <a class="close" {{action onClose target="view"}}>x</a>
+          <a class="close" {{action onClose target="view"}}>&times;</a>
         {{/if}}
         <h4 id="modal-label">
           {{#if view.headerClass}}
@@ -54,17 +54,23 @@
         {{else}}
           <div class="modal-footer">
             {{#if view.third}}
-              <button {{bindAttr disabled="view.disableThird" class=":btn view.thirdClass" id="view.thirdId"}} {{action onThird target="view"}}>{{view.third}}</button>
-            {{/if}}
-            {{#if view.secondary}}
-              <button {{bindAttr disabled="view.disableSecondary" class=":btn view.secondaryClass" id="view.secondaryId"}} {{action onSecondary target="view"}}>{{view.secondary}}</button>
-            {{/if}}
-            {{#if view.primary}}
-              <button {{bindAttr disabled="view.disablePrimary" class=":btn view.primaryClass" id="view.primaryId"}} {{action onPrimary target="view"}}>{{view.primary}}</button>
+              <button
+                {{bindAttr disabled="view.disableThird" class=":btn view.thirdClass" id="view.thirdId"}}
+                {{action onThird target="view"}}>
+                  {{view.third}}
+              </button>{{/if}}{{#if view.secondary}}<button
+                {{bindAttr disabled="view.disableSecondary" class=":btn view.secondaryClass" id="view.secondaryId"}}
+                {{action onSecondary target="view"}}>
+                  {{view.secondary}}
+              </button>{{/if}}{{#if view.primary}}<button
+                {{bindAttr disabled="view.disablePrimary" class=":btn view.primaryClass" id="view.primaryId"}}
+                {{action onPrimary target="view"}}>
+                  {{view.primary}}
+              </button>
             {{/if}}
           </div>
-            {{/if}}
-          {{/if}}
+        {{/if}}
+      {{/if}}
     </div>
   </div>
 </div>
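
This shared footer is what the controller-side changes in this commit rely on: a popup only declares its button labels and handlers, the template above renders them, and ad-hoc footer templates (such as the propertyDependence footer removed earlier) become unnecessary. A condensed sketch, assuming the popup is created through App.ModalPopup.show as elsewhere in ambari-web; the header and button classes are taken from the service_configs hunk below, while the handler bodies are illustrative:

App.ModalPopup.show({
  header: 'Warning: you must also change these Service properties',
  primary: Em.I18n.t('common.apply'),
  secondary: Em.I18n.t('common.ignore'),   // null or false hides the button entirely
  third: Em.I18n.t('common.cancel'),
  secondaryClass: 'btn-warning',
  disablePrimary: false,                   // may also be an Em.computed binding
  onPrimary: function () { this.hide(); },
  onSecondary: function () { this.hide(); },
  onThird: function () { this.hide(); }
});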

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/templates/common/progress.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/progress.hbs b/ambari-web/app/templates/common/progress.hbs
index 8485698..fce5641 100644
--- a/ambari-web/app/templates/common/progress.hbs
+++ b/ambari-web/app/templates/common/progress.hbs
@@ -75,11 +75,13 @@
     </div>
   </div>
 </div>
-<div class="wizard-footer col-md-12">
-  <div class="btn-area">
-    {{#if view.showBackButton}}
-      <button class="btn btn-default pull-left" {{bindAttr disabled="controller.isBackButtonDisabled"}} {{action back target="controller"}}>&larr; {{t common.back}}</button>
-    {{/if}}
-    <button class="btn btn-success pull-right" {{bindAttr disabled="controller.isSubmitDisabled"}} {{action done target="controller"}}>{{{view.submitButtonText}}}</button>
+{{#unless view.isSimpleModal}}
+  <div class="wizard-footer col-md-12">
+    <div class="btn-area">
+      {{#if view.showBackButton}}
+        <button class="btn btn-default pull-left" {{bindAttr disabled="controller.isBackButtonDisabled"}} {{action back target="controller"}}>&larr; {{t common.back}}</button>
+      {{/if}}
+      <button class="btn btn-success pull-right" {{bindAttr disabled="controller.isSubmitDisabled"}} {{action done target="controller"}}>{{{view.submitButtonText}}}</button>
+    </div>
   </div>
-</div>
+{{/unless}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/templates/main/service/info/save_popup_footer.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/info/save_popup_footer.hbs b/ambari-web/app/templates/main/service/info/save_popup_footer.hbs
index 4907b3a..4f9a52f 100644
--- a/ambari-web/app/templates/main/service/info/save_popup_footer.hbs
+++ b/ambari-web/app/templates/main/service/info/save_popup_footer.hbs
@@ -17,7 +17,14 @@
 }}
 
 <div class="modal-footer">
-  <button type="button" class="btn btn-default" {{action onCancel target="view.parentView"}}>{{t common.cancel}}</button>
-  <button type="button" class="btn btn-default" {{action onDiscard target="view.parentView"}}>{{t common.discard}}</button>
-  <button type="button" class="btn btn-success" {{bindAttr disabled="view.isSaveDisabled"}} {{action onSave target="view.parentView"}}>{{t common.save}}</button>
+  <button type="button" class="btn btn-default"
+    {{action onCancel target="view.parentView"}}>
+      {{t common.cancel}}
+  </button><button type="button" class="btn btn-default"
+    {{action onDiscard target="view.parentView"}}>
+      {{t common.discard}}
+  </button><button type="button" class="btn btn-success"
+    {{bindAttr disabled="view.isSaveDisabled"}}
+    {{action onSave target="view.parentView"}}>
+      {{t common.save}}</button>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/templates/wizard/step3/step3_host_warning_popup_footer.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/wizard/step3/step3_host_warning_popup_footer.hbs b/ambari-web/app/templates/wizard/step3/step3_host_warning_popup_footer.hbs
index d906b75..10c7e32 100644
--- a/ambari-web/app/templates/wizard/step3/step3_host_warning_popup_footer.hbs
+++ b/ambari-web/app/templates/wizard/step3/step3_host_warning_popup_footer.hbs
@@ -26,10 +26,12 @@
   {{/if}}
 </div>
 {{#if view.parentView.secondary}}
-  <button type="button"
-          class="btn btn-info" {{bindAttr disabled="view.isUpdateInProgress"}} {{action onSecondary target="view.parentView"}}>
-    <i class="glyphicon glyphicon-repeat"></i>&nbsp;{{view.parentView.secondary}}</button>
-{{/if}}
-{{#if view.parentView.primary}}
-  <button type="button" class="btn btn-default" {{action onPrimary target="view.parentView"}}>{{view.parentView.primary}}</button>
+  <button type="button" class="btn btn-info"
+    {{bindAttr disabled="view.isUpdateInProgress"}}
+    {{action onSecondary target="view.parentView"}}>
+      <i class="glyphicon glyphicon-repeat"></i>&nbsp;{{view.parentView.secondary}}
+  </button>{{/if}}{{#if view.parentView.primary}}<button type="button" class="btn btn-default"
+    {{action onPrimary target="view.parentView"}}>
+      {{view.parentView.primary}}
+  </button>
 {{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/views/common/assign_master_components_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/assign_master_components_view.js b/ambari-web/app/views/common/assign_master_components_view.js
index 73654c7..8ad1991 100644
--- a/ambari-web/app/views/common/assign_master_components_view.js
+++ b/ambari-web/app/views/common/assign_master_components_view.js
@@ -43,11 +43,9 @@ App.AssignMasterComponentsView = Em.View.extend({
    */
   shouldUseInputs: Em.computed.gt('controller.hosts.length', 25),
 
-  isBackButtonVisible: true,
-
-  isCancelButtonVisible: false,
+  isWizardStep: true,
 
-  acceptButtonText: Em.I18n.t('common.next') + '&rarr;',
+  isBackButtonVisible: true,
 
   didInsertElement: function () {
     this.get('controller').loadStep();

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/views/common/configs/service_configs_by_category_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/service_configs_by_category_view.js b/ambari-web/app/views/common/configs/service_configs_by_category_view.js
index 25f7dfb..68bf22c 100644
--- a/ambari-web/app/views/common/configs/service_configs_by_category_view.js
+++ b/ambari-web/app/views/common/configs/service_configs_by_category_view.js
@@ -254,8 +254,12 @@ App.ServiceConfigsByCategoryView = Em.View.extend(App.UserPref, App.ConfigOverri
         classNames: ['modal-690px-width'],
         modalDialogClasses: ['modal-lg'],
         showCloseButton: false,
+        primary: Em.I18n.t('common.apply'),
+        secondary: serviceId == 'MISC' ? Em.I18n.t('common.ignore') : null,
+        third: Em.I18n.t('common.cancel'),
+        secondaryClass: 'btn-warning',
         header: "Warning: you must also change these Service properties",
-        onApply: function () {
+        onPrimary: function () {
           self.get("newAffectedProperties").forEach(function(item) {
             if (item.isNewProperty) {
               self.createProperty({
@@ -275,11 +279,11 @@ App.ServiceConfigsByCategoryView = Em.View.extend(App.UserPref, App.ConfigOverri
           self.get("controller").set("miscModalVisible", false);
           this.hide();
         },
-        onIgnore: function () {
+        onSecondary: function () {
           self.get("controller").set("miscModalVisible", false);
           this.hide();
         },
-        onUndo: function () {
+        onThird: function () {
           var affected = self.get("newAffectedProperties").objectAt(0),
             changedProperty = self.get("controller.stepConfigs").findProperty("serviceName", affected.sourceServiceName)
               .get("configs").findProperty("name", affected.changedPropertyName);
@@ -287,11 +291,6 @@ App.ServiceConfigsByCategoryView = Em.View.extend(App.UserPref, App.ConfigOverri
           self.get("controller").set("miscModalVisible", false);
           this.hide();
         },
-        footerClass: Em.View.extend({
-          classNames: ['modal-footer'],
-          templateName: require('templates/common/configs/propertyDependence_footer'),
-          canIgnore: serviceId == 'MISC'
-        }),
         bodyClass: Em.View.extend({
           templateName: require('templates/common/configs/propertyDependence'),
           controller: this,

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/views/main/admin/kerberos/disable_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/kerberos/disable_view.js b/ambari-web/app/views/main/admin/kerberos/disable_view.js
index 64657e8..f7b3527 100644
--- a/ambari-web/app/views/main/admin/kerberos/disable_view.js
+++ b/ambari-web/app/views/main/admin/kerberos/disable_view.js
@@ -37,6 +37,6 @@ App.KerberosDisableView = App.KerberosProgressPageView.extend({
 
   msgColor: 'alert-info',
 
-  submitButtonText: Em.I18n.t('common.complete')
+  isSimpleModal: true
 
 });
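
With isSimpleModal set here, the wizard footer in progress.hbs is suppressed and the Kerberos-disable popup relies entirely on the shared modal footer configured in the routes/main.js hunk earlier in this commit. Roughly, recombining those two hunks (other popup options omitted):

App.ModalPopup.show({
  bodyClass: App.KerberosDisableView.extend({
    controllerBinding: 'App.router.kerberosDisableController'
  }),
  primary: Em.I18n.t('common.complete'),
  secondary: null,   // no second button
  // Complete stays disabled while the controller reports isSubmitDisabled.
  disablePrimary: Em.computed.alias('App.router.kerberosDisableController.isSubmitDisabled')
});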

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b6d1cd3/ambari-web/app/views/wizard/step7/assign_master_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/wizard/step7/assign_master_view.js b/ambari-web/app/views/wizard/step7/assign_master_view.js
index b795647..0407afe 100644
--- a/ambari-web/app/views/wizard/step7/assign_master_view.js
+++ b/ambari-web/app/views/wizard/step7/assign_master_view.js
@@ -26,13 +26,9 @@ App.AssignMasterOnStep7View = App.AssignMasterComponentsView.extend({
 
   showTitle: false,
 
-  acceptButtonText: Em.I18n.t('common.select'),
-
   alertMessage: '',
 
-  isBackButtonVisible: false,
-
-  isCancelButtonVisible: true,
+  isWizardStep: false,
 
   willInsertElement: function() {
     this._super();


[49/50] ambari git commit: Revert "AMBARI-20035. Duration in BGoperation window should display durations in proper time units (alexantonenko)"

Posted by nc...@apache.org.
Revert "AMBARI-20035. Duration in BGoperation window should display durations in proper time units (alexantonenko)"

This reverts commit ab53946fa63ad209158300ca0907e5a67097e717.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cb030a4e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cb030a4e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cb030a4e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: cb030a4e5b829d5adbfbffbb1c02aa94caa6d72b
Parents: 56eb5a7
Author: Alex Antonenko <hi...@gmail.com>
Authored: Thu Feb 23 19:13:49 2017 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Thu Feb 23 19:14:49 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/utils/date/date.js               | 26 ++++++++---------
 .../dashboard/widgets/uptime_text_widget.js     |  9 ++----
 ambari-web/test/mappers/service_mapper_test.js  |  2 +-
 ambari-web/test/utils/date/date_test.js         | 30 ++++++++++----------
 .../stack_upgrade/upgrade_history_view_test.js  |  8 +++---
 .../widgets/hbase_master_uptime_test.js         |  4 +--
 .../dashboard/widgets/namenode_uptime_test.js   |  4 +--
 .../widgets/resource_manager_uptime_test.js     |  4 +--
 .../widgets/uptime_text_widget_test.js          |  4 +--
 9 files changed, 44 insertions(+), 47 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/cb030a4e/ambari-web/app/utils/date/date.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/date/date.js b/ambari-web/app/utils/date/date.js
index 986815e..d461d21 100644
--- a/ambari-web/app/utils/date/date.js
+++ b/ambari-web/app/utils/date/date.js
@@ -153,13 +153,13 @@ module.exports = {
    * 30 ms = 30 ms
    * 300 ms = 300 ms
    * 999 ms = 999 ms
-   * 1000 ms = 1 secs
-   * 3000 ms = 3 secs
-   * 35000 ms = 35 secs
-   * 350000 ms = 350 secs
-   * 999999 ms = 999 secs
-   * 1000000 ms = 17 mins
-   * 3500000 secs = 58 mins
+   * 1000 ms = 1.00 secs
+   * 3000 ms = 3.00 secs
+   * 35000 ms = 35.00 secs
+   * 350000 ms = 350.00 secs
+   * 999999 ms = 999.99 secs
+   * 1000000 ms = 16.66 mins
+   * 3500000 secs = 58.33 mins
    *
    * @param {number} time
    * @param {bool} [zeroValid] for the case to show 0 when time is 0, not null
@@ -175,7 +175,6 @@ module.exports = {
       return null;
     }
     var timeStr = intTime.toString();
-    var date = new Date(intTime);
     var lengthOfNumber = timeStr.length;
     var oneMinMs = 60000;
     var oneHourMs = 3600000;
@@ -185,18 +184,19 @@ module.exports = {
       return time + ' ms';
     }
     if (lengthOfNumber < 7) {
-      time = (time / 1000).toFixed(0);
+      time = (time / 1000).toFixed(2);
       return time + ' secs';
     }
     if (time < oneHourMs) {
-      time = (time / oneMinMs).toFixed(0);
+      time = (time / oneMinMs).toFixed(2);
       return time + ' mins';
     }
     if (time < oneDayMs) {
-      return date.getUTCHours() + 'h '+ date.getUTCMinutes() + 'm ' + date.getUTCSeconds() +'s';
+      time = (time / oneHourMs).toFixed(2);
+      return time + ' hours';
     }
-
-    return ((date.getUTCFullYear() - 1970) * 365 + date.getUTCMonth() * 31 + date.getUTCDate()-1) + 'd ' + date.getUTCHours() + 'h ' + date.getUTCMinutes() + 'm';
+    time = (time / oneDayMs).toFixed(2);
+    return time + ' days';
   },
 
   /**

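For reference, the behaviour this revert restores formats durations as fixed two-decimal values of a single unit rather than the composite 'Xh Ym Zs' style. A minimal usage sketch, with the expected outputs taken from the restored date_test.js expectations below (the require path is assumed to follow ambari-web's module layout):

var date = require('utils/date/date');

date.timingFormat(300);         // "300 ms"     - sub-second values stay in milliseconds
date.timingFormat(35000);       // "35.00 secs"
date.timingFormat(3500000);     // "58.33 mins"
date.timingFormat(35000000);    // "9.72 hours"
date.timingFormat(350000000);   // "4.05 days"
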
http://git-wip-us.apache.org/repos/asf/ambari/blob/cb030a4e/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js b/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js
index e1b9c42..74d59f1 100644
--- a/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js
+++ b/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js
@@ -76,23 +76,20 @@ App.UptimeTextDashboardWidgetView = App.TextDashboardWidgetView.extend({
   uptimeProcessing: function (uptime) {
     var uptimeString = this.timeConverter(uptime);
     var diff = App.dateTimeWithTimeZone() - uptime;
-    var valueType = "";
     if (diff < 0) {
       diff = 0;
     }
     var formatted = date.timingFormat(diff); //17.67 days
     var timeUnit = null;
     if (formatted) {
-      valueType = formatted.split(" ")[0];
-      switch (valueType[valueType.length-1]) {
+      switch (formatted.split(" ")[1]) {
         case 'secs':
           timeUnit = 's';
           break;
-        case 'h':
+        case 'hours':
           timeUnit = 'hr';
-
           break;
-        case 'd':
+        case 'days':
           timeUnit = 'd';
           break;
         case 'mins':

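The switch restored above simply maps the unit word produced by date.timingFormat back onto the widget's short labels. Expressed as a lookup table (equivalent behaviour, for illustration only; shortUnit is not a real helper in this file):

// 'secs' -> 's', 'mins' -> 'min', 'hours' -> 'hr', 'days' -> 'd'
var UNIT_ABBREVIATIONS = { secs: 's', mins: 'min', hours: 'hr', days: 'd' };

function shortUnit(formatted) {
  // e.g. shortUnit('9.72 hours') === 'hr'
  return UNIT_ABBREVIATIONS[formatted.split(' ')[1]] || null;
}
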
http://git-wip-us.apache.org/repos/asf/ambari/blob/cb030a4e/ambari-web/test/mappers/service_mapper_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mappers/service_mapper_test.js b/ambari-web/test/mappers/service_mapper_test.js
index c5a0b10..4a8d49d 100644
--- a/ambari-web/test/mappers/service_mapper_test.js
+++ b/ambari-web/test/mappers/service_mapper_test.js
@@ -220,7 +220,7 @@ describe('App.serviceMetricsMapper', function () {
         message: 'Storm mapper, stack version 2.1',
         expectedValues: {
           total_executors: 2,
-          nimbus_uptime: "3h 57m 30s",
+          nimbus_uptime: "3.96 hours",
           free_slots: 2,
           used_slots: 0,
           total_slots: 2,

http://git-wip-us.apache.org/repos/asf/ambari/blob/cb030a4e/ambari-web/test/utils/date/date_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/date/date_test.js b/ambari-web/test/utils/date/date_test.js
index c005222..5289fdd 100644
--- a/ambari-web/test/utils/date/date_test.js
+++ b/ambari-web/test/utils/date/date_test.js
@@ -90,23 +90,23 @@ describe('date', function () {
       {i: '30', e:'30 ms'},
       {i: '300', e:'300 ms'},
       {i: '999', e:'999 ms'},
-      {i: '1000', e:'1 secs'},
-      {i: '3000', e:'3 secs'},
-      {i: '35000', e:'35 secs'},
-      {i: '350000', e:'350 secs'},
-      {i: '999999', e:'1000 secs'},
-      {i: '1000000', e:'17 mins'},
-      {i: '3500000', e:'58 mins'},
-      {i: '35000000', e:'9h 43m 20s'},
-      {i: '350000000', e:'4d 1h 13m'},
-      {i: '3500000000', e:'40d 12h 13m'},
-      {i: '35000000000', e:'405d 2h 13m'}
+      {i: '1000', e:'1.00 secs'},
+      {i: '3000', e:'3.00 secs'},
+      {i: '35000', e:'35.00 secs'},
+      {i: '350000', e:'350.00 secs'},
+      {i: '999999', e:'1000.00 secs'},
+      {i: '1000000', e:'16.67 mins'},
+      {i: '3500000', e:'58.33 mins'},
+      {i: '35000000', e:'9.72 hours'},
+      {i: '350000000', e:'4.05 days'},
+      {i: '3500000000', e:'40.51 days'},
+      {i: '35000000000', e:'405.09 days'}
     ]);
 
     describe('Correct data', function(){
       tests.forEach(function(test) {
         it(test.i, function() {
-          expect(date.timingFormat(test.i)).to.be.equal(test.e);
+          expect(date.timingFormat(test.i)).to.equal(test.e);
         });
       });
     });
@@ -148,12 +148,12 @@ describe('date', function () {
       {
         startTimestamp: 1349752195000,
         endTimestamp: 1349752199000,
-        e: '4 secs'
+        e: '4.00 secs'
       },
       {
         startTimestamp: 1349752195000,
         endTimestamp: 1367752195000,
-        e: '213d 8h 0m'
+        e: '208.33 days'
       },
       {
         startTimestamp: -10000000,
@@ -170,7 +170,7 @@ describe('date', function () {
         startTimestamp: 100000000,
         endTimestamp: -1,
         stubbed: true,
-        e: '19 secs'
+        e: '19.00 secs'
       }
     ];
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/cb030a4e/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
index 13da96e..0ca7080 100644
--- a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
+++ b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
@@ -135,13 +135,13 @@ describe('App.MainAdminStackUpgradeHistoryView', function () {
       event = {
         context: Em.Object.create({
           isSelected: false,
-          value: 'ALL'
+          value: 'ALL',
         })
       };
       view.set('categories', [
         Em.Object.create({
           isSelected: true,
-          value: 'UPGRADE_COMPLETED'
+          value: 'UPGRADE_COMPLETED',
         }),
         event.context
       ]);
@@ -192,12 +192,12 @@ describe('App.MainAdminStackUpgradeHistoryView', function () {
       Em.Object.create({
         directionLabel: Em.I18n.t('common.upgrade'),
         upgradeTypeLabel: Em.I18n.t('common.rolling'),
-        duration: '1h 0m 0s'
+        duration: '1.00 hours'
       }),
       Em.Object.create({
         directionLabel: Em.I18n.t('common.downgrade'),
         upgradeTypeLabel: Em.I18n.t('common.hostOrdered'),
-        duration: '2h 0m 0s'
+        duration: '2.00 hours'
       })
     ];
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/cb030a4e/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js b/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js
index 15b1ce3..4f19a35 100644
--- a/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js
@@ -33,8 +33,8 @@ describe('App.HBaseMasterUptimeView', function () {
       e: {
         isGreen: true,
         isNA: false,
-        content: '197.0 d',
-        data: 197
+        content: '192.1 d',
+        data: 192.1
       }
     },
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/cb030a4e/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js b/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js
index 50a39c7..bfd101c 100644
--- a/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js
@@ -35,8 +35,8 @@ describe('App.NameNodeUptimeView', function() {
         isOrange: false,
         isGreen: true,
         isNA: false,
-        content: '197.0 d',
-        data: 197
+        content: '192.1 d',
+        data: 192.1
       }
     },
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/cb030a4e/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js b/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js
index 828133e..d4a9b34 100644
--- a/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js
@@ -33,8 +33,8 @@ describe('App.ResourceManagerUptimeView', function() {
       e: {
         isGreen: true,
         isNA: false,
-        content: '197.0 d',
-        data: 197
+        content: '192.1 d',
+        data: 192.1
       }
     },
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/cb030a4e/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js b/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js
index 00d224c..fa20593 100644
--- a/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js
@@ -60,7 +60,7 @@ describe('App.UptimeTextDashboardWidgetView', function() {
       {
         diff: 10*1000,
         e: {
-          timeUnit: 'secs'
+          timeUnit: 's'
         }
       },
       {
@@ -78,7 +78,7 @@ describe('App.UptimeTextDashboardWidgetView', function() {
       {
         diff: 1800*1000,
         e: {
-          timeUnit: 'mins'
+          timeUnit: 'min'
         }
       }
     ];