Posted to commits@ambari.apache.org by nc...@apache.org on 2017/02/24 14:19:16 UTC

[09/50] ambari git commit: AMBARI-20086. Hive View 2.0: Code Refactoring. Removal of the database cache. (dipayanb)

AMBARI-20086. Hive View 2.0: Code Refactoring. Removal of the database cache. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3e76e47c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3e76e47c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3e76e47c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 3e76e47c5430704723801819e7f5b59759242e61
Parents: bc4b8bc
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Wed Feb 22 12:35:57 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Wed Feb 22 12:36:46 2017 +0530

----------------------------------------------------------------------
 .../ambari/view/hive20/ConnectionSystem.java    |  27 --
 .../hive20/actor/DatabaseChangeNotifier.java    | 168 ----------
 .../view/hive20/actor/DatabaseManager.java      | 313 -------------------
 .../ambari/view/hive20/actor/JdbcConnector.java |   4 -
 .../ambari/view/hive20/actor/LogAggregator.java |   4 +-
 .../view/hive20/actor/MetaDataManager.java      | 137 --------
 .../view/hive20/actor/MetaDataRetriever.java    | 173 ----------
 .../view/hive20/actor/TableChangeNotifier.java  |  95 ------
 .../ambari/view/hive20/actor/message/Ping.java  |  50 ---
 .../ambari/view/hive20/client/DDLDelegator.java |   7 +-
 .../view/hive20/client/DDLDelegatorImpl.java    |  52 ++-
 .../view/hive20/internal/dto/TableInfo.java     |  12 +-
 .../view/hive20/internal/dto/TableResponse.java |   9 -
 .../view/hive20/resources/browser/DDLProxy.java |  67 ++--
 .../resources/browser/HiveBrowserService.java   | 259 ---------------
 .../hive20/resources/system/SystemService.java  |  28 +-
 .../utils/MetaDataManagerEventSubmitter.java    |  43 ---
 .../src/main/resources/ui/app/adapters/ping.js  |  35 ---
 .../main/resources/ui/app/adapters/ranger.js    |  27 ++
 .../src/main/resources/ui/app/models/table.js   |   9 +-
 .../main/resources/ui/app/routes/application.js |   5 -
 .../databases/database/tables/table/auth.js     |   2 +-
 .../resources/ui/app/serializers/database.js    |  25 ++
 .../main/resources/ui/app/serializers/table.js  |  22 ++
 .../resources/ui/app/services/keep-alive.js     |  31 --
 .../views/hive20/src/main/resources/view.xml    |   5 -
 26 files changed, 152 insertions(+), 1457 deletions(-)
----------------------------------------------------------------------
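
Before the per-file hunks, a minimal sketch of the new call path (illustrative
only, not part of this commit; it assumes the ConnectionFactory / DDLDelegatorImpl
signatures that appear in the DDLProxy hunk further down). With the actor-based
cache (MetaDataManager, DatabaseManager, MetaDataRetriever and the change
notifiers) removed, database and table metadata is fetched from Hive on demand
through DDLDelegator, which now returns DatabaseInfo/TableInfo DTOs instead of
plain strings:

    // Sketch: direct, uncached metadata lookup from a view component.
    ConnectionConfig config = ConnectionFactory.create(context);   // context: the view's ViewContext
    DDLDelegator delegator = new DDLDelegatorImpl(context,
        ConnectionSystem.getInstance().getActorSystem(),
        ConnectionSystem.getInstance().getOperationController(context));

    // Previously List<String>; now DTOs. "default" is only an example database name.
    List<DatabaseInfo> databases = delegator.getDbList(config, "*");
    List<TableInfo> tables = delegator.getTableList(config, "default", "*");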


http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
index a6c7334..d7fbf41 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
@@ -28,7 +28,6 @@ import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive20.actor.DeathWatch;
-import org.apache.ambari.view.hive20.actor.MetaDataManager;
 import org.apache.ambari.view.hive20.actor.OperationController;
 import org.apache.ambari.view.hive20.internal.ConnectionSupplier;
 import org.apache.ambari.view.hive20.internal.DataStorageSupplier;
@@ -47,7 +46,6 @@ public class ConnectionSystem {
   private static volatile ConnectionSystem instance = null;
   private static final Object lock = new Object();
   private static Map<String, Map<String, ActorRef>> operationControllerMap = new ConcurrentHashMap<>();
-  private final Map<String, ActorRef> metaDataManagerMap = new ConcurrentHashMap<>();
 
   // credentials map stores usernames and passwords
   private static Map<String, String> credentialsMap = new ConcurrentHashMap<>();
@@ -107,31 +105,6 @@ public class ConnectionSystem {
     return ref;
   }
 
-  /**
-   * Returns one MetaDataManager actor per view instance
-   * @param context - View context
-   * @return MetaDataManager actor
-   */
-  public synchronized ActorRef getMetaDataManager(ViewContext context) {
-    SafeViewContext safeViewContext = new SafeViewContext(context);
-    String instanceName = safeViewContext.getInstanceName();
-    ActorRef metaDataManager = metaDataManagerMap.get(instanceName);
-    if(metaDataManager == null) {
-      metaDataManager = createMetaDataManager(safeViewContext);
-      metaDataManagerMap.put(instanceName, metaDataManager);
-    }
-
-    return metaDataManager;
-  }
-
-  public synchronized Optional<ActorRef> getMetaDataManagerIfPresent(String instanceName) {
-    return Optional.fromNullable(metaDataManagerMap.get(instanceName));
-  }
-
-  private ActorRef createMetaDataManager(SafeViewContext safeViewContext) {
-    return actorSystem.actorOf(MetaDataManager.props(safeViewContext));
-  }
-
   public synchronized void persistCredentials(String user,String password){
     if(!Strings.isNullOrEmpty(password)){
       credentialsMap.put(user,password);

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java
deleted file mode 100644
index 37f24d2..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.ActorRef;
-import akka.actor.PoisonPill;
-import akka.actor.Props;
-import com.google.common.collect.Sets;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.internal.dto.TableInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-/**
- *
- */
-public class DatabaseChangeNotifier extends HiveActor {
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  private String currentDatabaseName;
-  private Map<String, TableWrapper> tables = new HashMap<>();
-  private Map<String, TableInfo> newTables = new HashMap<>();
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-    Object message = hiveMessage.getMessage();
-    if(message instanceof DatabaseAdded) {
-      handleDatabaseAdded((DatabaseAdded) message);
-    } else if ( message instanceof DatabaseRemoved) {
-      handleDatabaseRemoved((DatabaseRemoved) message);
-    } else if (message instanceof TableUpdated) {
-      handleTableUpdated((TableUpdated) message);
-    } else if (message instanceof AllTablesUpdated) {
-      handleAllTableUpdated((AllTablesUpdated) message);
-    }
-  }
-
-  private void handleDatabaseAdded(DatabaseAdded message) {
-    LOG.info("Database Added: {}", message.name);
-    currentDatabaseName = message.name;
-    // TODO: Send event to eventbus
-  }
-
-  private void handleDatabaseRemoved(DatabaseRemoved message) {
-    LOG.info("Database Removed: {}", message.name);
-    // TODO: Send event to eventbus
-  }
-
-  private void handleTableUpdated(TableUpdated message) {
-    LOG.info("XXXXX: table xxxx. Size: {}", newTables.size());
-    newTables.put(message.info.getName(), message.info);
-  }
-
-  private void handleAllTableUpdated(AllTablesUpdated message) {
-    Set<String> oldTableNames = new HashSet<>(tables.keySet());
-    Set<String> newTableNames = new HashSet<>(newTables.keySet());
-
-    Set<String> tablesAdded = Sets.difference(newTableNames, oldTableNames);
-    Set<String> tablesRemoved = Sets.difference(oldTableNames, newTableNames);
-    Set<String> tablesUpdated = Sets.intersection(oldTableNames, newTableNames);
-
-    updateTablesAdded(tablesAdded);
-    updateTablesRemoved(tablesRemoved);
-    updateTablesUpdated(tablesUpdated);
-    newTables.clear();
-  }
-
-  private void updateTablesAdded(Set<String> tablesAdded) {
-    for (String tableName: tablesAdded) {
-      TableWrapper wrapper = new TableWrapper(tableName);
-      tables.put(tableName, wrapper);
-      wrapper.getTableNotifier().tell(new TableChangeNotifier.TableAdded(newTables.get(tableName)), getSelf());
-    }
-  }
-
-  private void updateTablesRemoved(Set<String> tablesRemoved) {
-    for(String tableName: tablesRemoved) {
-      TableWrapper tableWrapper = tables.remove(tableName);
-      tableWrapper.getTableNotifier().tell(new TableChangeNotifier.TableRemoved(tableName), getSelf());
-      tableWrapper.getTableNotifier().tell(PoisonPill.getInstance(), getSelf());
-    }
-  }
-
-  private void updateTablesUpdated(Set<String> tablesUpdated) {
-    for(String tableName: tablesUpdated) {
-      TableWrapper tableWrapper = tables.get(tableName);
-      // TODO: Check what needs to be done here.
-    }
-  }
-
-  public static Props props() {
-    return Props.create(DatabaseChangeNotifier.class);
-  }
-
-  public class TableWrapper {
-    private final String tableName;
-    private final ActorRef tableNotifier;
-
-    private TableWrapper(String tableName) {
-      this.tableName = tableName;
-      this.tableNotifier = getContext().actorOf(TableChangeNotifier.props());
-    }
-
-    public String getTableName() {
-      return tableName;
-    }
-
-    public ActorRef getTableNotifier() {
-      return tableNotifier;
-    }
-  }
-
-  public static class DatabaseAdded {
-    private final String name;
-
-    public DatabaseAdded(String name) {
-      this.name = name;
-    }
-  }
-
-
-  public static class DatabaseRemoved {
-    private final String name;
-
-    public DatabaseRemoved(String name) {
-      this.name = name;
-    }
-  }
-
-  public static class TableUpdated {
-    private final TableInfo info;
-
-    public TableUpdated(TableInfo info) {
-      this.info = info;
-    }
-  }
-
-  public static class AllTablesUpdated {
-    private final String database;
-
-    public AllTablesUpdated(String database) {
-      this.database = database;
-    }
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java
deleted file mode 100644
index bd7c6bd..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java
+++ /dev/null
@@ -1,313 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.ActorRef;
-import akka.actor.PoisonPill;
-import akka.actor.Props;
-import com.google.common.base.Function;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.Sets;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive20.AuthParams;
-import org.apache.ambari.view.hive20.ConnectionFactory;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.client.ConnectionConfig;
-import org.apache.ambari.view.hive20.internal.Connectable;
-import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
-import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
-import org.apache.ambari.view.hive20.internal.dto.TableInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.Duration;
-
-import javax.annotation.Nullable;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Manages database related state, queries Hive to get the list of databases and then manages state for each database.
- * Also, periodically updates the list of databases by calling hive.
- */
-public class DatabaseManager extends HiveActor {
-
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  private final Connectable connectable;
-
-  private final ActorRef metaDataRetriever;
-  private final String username;
-
-  private boolean refreshInProgress = false;
-  private boolean selfRefreshQueued = false;
-
-  private Map<String, DatabaseWrapper> databases = new HashMap<>();
-  private Set<String> databasesToUpdate;
-
-
-  public DatabaseManager(String username, Connectable connectable) {
-    this.username = username;
-    this.connectable = connectable;
-    metaDataRetriever = getContext().actorOf(MetaDataRetriever.props(connectable));
-  }
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-
-    Object message = hiveMessage.getMessage();
-    if (message instanceof Refresh) {
-      handleRefresh((Refresh) message);
-    } else if (message instanceof SelfRefresh) {
-      handleSelfRefresh();
-    } else if (message instanceof MetaDataRetriever.DBRefreshed) {
-      handleDBRefreshed((MetaDataRetriever.DBRefreshed) message);
-    } else if (message instanceof MetaDataRetriever.TableRefreshed) {
-      handleTableRefreshed((MetaDataRetriever.TableRefreshed) message);
-    } else if (message instanceof MetaDataRetriever.AllTableRefreshed) {
-      handleAllTableRefreshed((MetaDataRetriever.AllTableRefreshed) message);
-    } else if (message instanceof GetDatabases) {
-      handleGetDatabases((GetDatabases) message);
-    }
-
-  }
-
-  private void handleSelfRefresh() {
-    if (refreshInProgress) {
-      getContext().system().scheduler().scheduleOnce(Duration.create(500, TimeUnit.MILLISECONDS),
-          getSelf(), new SelfRefresh(), getContext().dispatcher(), getSelf());
-    } else {
-      selfRefreshQueued = false;
-      refresh(true);
-    }
-  }
-
-  private void handleRefresh(Refresh message) {
-    if (refreshInProgress && selfRefreshQueued) {
-      return; // We will not honor refresh message when a refresh is going on and another self refresh is queued in mailbox
-    } else if (refreshInProgress) {
-      selfRefreshQueued = true; // If refresh is in progress, we will queue up only one refresh message.
-      getContext().system().scheduler().scheduleOnce(Duration.create(500, TimeUnit.MILLISECONDS),
-          getSelf(), new SelfRefresh(), getContext().dispatcher(), getSelf());
-    } else {
-      refresh(message.initiateScheduler());
-    }
-  }
-
-  private void handleDBRefreshed(MetaDataRetriever.DBRefreshed message) {
-    Set<DatabaseInfo> databasesInfos = message.getDatabases();
-    Set<String> currentDatabases = new HashSet<>(databases.keySet());
-    Set<String> newDatabases = FluentIterable.from(databasesInfos).transform(new Function<DatabaseInfo, String>() {
-      @Nullable
-      @Override
-      public String apply(@Nullable DatabaseInfo databaseInfo) {
-        return databaseInfo.getName();
-      }
-    }).toSet();
-
-    databasesToUpdate = new HashSet<>(newDatabases);
-
-    Set<String> databasesAdded = Sets.difference(newDatabases, currentDatabases);
-    Set<String> databasesRemoved = Sets.difference(currentDatabases, newDatabases);
-
-    updateDatabasesAdded(databasesAdded, databasesInfos);
-    updateDatabasesRemoved(databasesRemoved);
-  }
-
-  private void updateDatabasesAdded(Set<String> databasesAdded, Set<DatabaseInfo> databasesInfos) {
-    for (DatabaseInfo info : databasesInfos) {
-      if (databasesAdded.contains(info.getName())) {
-        DatabaseWrapper wrapper = new DatabaseWrapper(info);
-        databases.put(info.getName(), wrapper);
-        wrapper.getDatabaseNotifier().tell(new DatabaseChangeNotifier.DatabaseAdded(info.getName()), getSelf());
-      }
-    }
-  }
-
-  private void updateDatabasesRemoved(Set<String> databasesRemoved) {
-    for (String database : databasesRemoved) {
-      DatabaseWrapper wrapper = databases.remove(database);
-      ActorRef notifier = wrapper.getDatabaseNotifier();
-      notifier.tell(new DatabaseChangeNotifier.DatabaseRemoved(database), getSelf());
-      notifier.tell(PoisonPill.getInstance(), getSelf());
-    }
-  }
-
-  private void handleTableRefreshed(MetaDataRetriever.TableRefreshed message) {
-    ActorRef databaseChangeNotifier = getDatabaseChangeNotifier(message.getDatabase());
-    updateTable(message.getDatabase(), message.getTable());
-    databaseChangeNotifier.tell(new DatabaseChangeNotifier.TableUpdated(message.getTable()), getSelf());
-  }
-
-  private void handleAllTableRefreshed(MetaDataRetriever.AllTableRefreshed message) {
-    ActorRef databaseChangeNotifier = getDatabaseChangeNotifier(message.getDatabase());
-    updateRemovedTables(message.getDatabase(), message.getCurrentTableNames());
-    databaseChangeNotifier.tell(new DatabaseChangeNotifier.AllTablesUpdated(message.getDatabase()), getSelf());
-    if (checkIfAllTablesOfAllDatabaseRefeshed(message)) {
-      refreshInProgress = false;
-    }
-  }
-
-  private void handleGetDatabases(GetDatabases message) {
-    if (refreshInProgress) {
-      // If currently refreshing, then schedule the same message after 500 milliseconds
-      getContext().system().scheduler().scheduleOnce(Duration.create(500, TimeUnit.MILLISECONDS),
-          getSelf(), message, getContext().dispatcher(), getSender());
-      return;
-    }
-    Set<DatabaseInfo> infos = new HashSet<>();
-    for (DatabaseWrapper wrapper : databases.values()) {
-      infos.add(wrapper.getDatabase());
-    }
-    getSender().tell(new DatabasesResult(infos), getSelf());
-  }
-
-  private boolean checkIfAllTablesOfAllDatabaseRefeshed(MetaDataRetriever.AllTableRefreshed message) {
-    databasesToUpdate.remove(message.getDatabase());
-    return databasesToUpdate.isEmpty();
-  }
-
-  private ActorRef getDatabaseChangeNotifier(String databaseName) {
-    DatabaseWrapper wrapper = databases.get(databaseName);
-    ActorRef databaseChangeNotifier = null;
-    if (wrapper != null) {
-      databaseChangeNotifier = wrapper.getDatabaseNotifier();
-    }
-    return databaseChangeNotifier;
-  }
-
-  private void refresh(boolean initiateScheduler) {
-    LOG.info("Received refresh for user");
-    refreshInProgress = true;
-    metaDataRetriever.tell(new MetaDataRetriever.RefreshDB(), getSelf());
-
-    if (initiateScheduler) {
-      scheduleRefreshAfter(1, TimeUnit.MINUTES);
-    }
-  }
-
-  private void scheduleRefreshAfter(long time, TimeUnit timeUnit) {
-    getContext().system().scheduler().scheduleOnce(Duration.create(time, timeUnit),
-        getSelf(), new Refresh(username), getContext().dispatcher(), getSelf());
-  }
-
-  @Override
-  public void postStop() throws Exception {
-    LOG.info("Database Manager stopped!!!");
-    connectable.disconnect();
-  }
-
-  private void updateTable(String databaseName, TableInfo table) {
-    DatabaseWrapper wrapper = databases.get(databaseName);
-    if (wrapper != null) {
-      DatabaseInfo info = wrapper.getDatabase();
-      info.getTables().add(table);
-    }
-  }
-
-  private void updateRemovedTables(String database, Set<String> currentTableNames) {
-    DatabaseWrapper wrapper = databases.get(database);
-    HashSet<TableInfo> notRemovedTables = new HashSet<>();
-    if (wrapper != null) {
-      DatabaseInfo info = wrapper.getDatabase();
-      for (TableInfo tableInfo : info.getTables()) {
-        if (currentTableNames.contains(tableInfo.getName())) {
-          notRemovedTables.add(tableInfo);
-        }
-      }
-      info.setTables(notRemovedTables);
-    }
-  }
-
-  public static Props props(ViewContext context) {
-    ConnectionConfig config = ConnectionFactory.create(context);
-    Connectable connectable = new HiveConnectionWrapper(config.getJdbcUrl(), config.getUsername(), config.getPassword(), new AuthParams(context));
-    return Props.create(DatabaseManager.class, config.getUsername(), connectable);
-  }
-
-  public static class Refresh {
-    private final String username;
-    private final boolean initiateScheduler;
-
-
-    public Refresh(String username) {
-      this(username, true);
-    }
-
-    public Refresh(String username, boolean initiateScheduler) {
-      this.username = username;
-      this.initiateScheduler = initiateScheduler;
-    }
-
-    public String getUsername() {
-      return username;
-    }
-
-    public boolean initiateScheduler() {
-      return initiateScheduler;
-    }
-  }
-
-  private static class SelfRefresh {
-  }
-
-  private class DatabaseWrapper {
-    private final DatabaseInfo database;
-    private final ActorRef databaseNotifier;
-
-    private DatabaseWrapper(DatabaseInfo database) {
-      this.database = database;
-      databaseNotifier = getContext().actorOf(DatabaseChangeNotifier.props());
-    }
-
-    public DatabaseInfo getDatabase() {
-      return database;
-    }
-
-    public ActorRef getDatabaseNotifier() {
-      return databaseNotifier;
-    }
-  }
-
-  public static class GetDatabases {
-    private final String username;
-
-    public GetDatabases(String username) {
-      this.username = username;
-    }
-
-    public String getUsername() {
-      return username;
-    }
-  }
-
-  public static class DatabasesResult {
-    private final Set<DatabaseInfo> databases;
-
-    public DatabasesResult(Set<DatabaseInfo> databases) {
-      this.databases = databases;
-    }
-
-    public Set<DatabaseInfo> getDatabases() {
-      return databases;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
index 1855afc..1d9744b 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
@@ -57,7 +57,6 @@ import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
 import org.apache.ambari.view.hive20.utils.HiveActorConfiguration;
-import org.apache.ambari.view.hive20.utils.MetaDataManagerEventSubmitter;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.hive.jdbc.HiveConnection;
 import org.slf4j.Logger;
@@ -290,9 +289,6 @@ public class JdbcConnector extends HiveActor {
     LOG.info("Finished processing SQL statements for Job id : {}", jobId.or("SYNC JOB"));
     if (isAsync() && jobId.isPresent()) {
       updateJobStatus(jobId.get(), Job.JOB_STATE_FINISHED);
-
-      LOG.info("Sending event to refresh meta information for user {} and instance {}", username, instanceName);
-      MetaDataManagerEventSubmitter.sendDBRefresh(username, instanceName);
     }
 
     if (resultSetOptional.isPresent()) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
index 600ea64..2c8a65d 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
@@ -72,9 +72,9 @@ public class LogAggregator extends HiveActor {
       try {
         getMoreLogs();
       } catch (SQLException e) {
-        LOG.error("SQL Error while getting logs. Tried writing to: {}", logFile);
+        LOG.warn("SQL Error while getting logs. Tried writing to: {}. Exception: {}", logFile, e.getMessage());
       } catch (HdfsApiException e) {
-        LOG.warn("HDFS Error while getting writing logs to {}", logFile);
+        LOG.warn("HDFS Error while writing logs to {}. Exception: {}", logFile, e.getMessage());
 
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java
deleted file mode 100644
index 525ec0d..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.ActorRef;
-import akka.actor.Cancellable;
-import akka.actor.Props;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.actor.message.Ping;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.Duration;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Manages the Meta Information for Hive Server. Singleton actor which stores several DatabaseManagerActor in memory for
- * each user and instance name combination.
- */
-public class MetaDataManager extends HiveActor {
-
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  /**
-   * Stores the sub database manager actors per user combination
-   */
-  private final Map<String, ActorRef> databaseManagers = new HashMap<>();
-  private final Map<String, Cancellable> terminationSchedulers = new HashMap<>();
-  private final ViewContext context;
-
-  public MetaDataManager(ViewContext context) {
-    this.context = context;
-  }
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-
-    Object message = hiveMessage.getMessage();
-    if (message instanceof Ping) {
-      handlePing((Ping) message);
-    } else if (message instanceof Terminate) {
-      handleTerminate((Terminate) message);
-    } else if (message instanceof DatabaseManager.GetDatabases) {
-      handleGetDatabases((DatabaseManager.GetDatabases) message);
-    }
-  }
-
-  private void handlePing(Ping message) {
-    LOG.info("Ping message received for user: {}, instance: {}", message.getUsername(), message.getInstanceName());
-    ActorRef databaseManager = databaseManagers.get(message.getUsername());
-    if (databaseManager == null) {
-      databaseManager = createDatabaseManager(message.getUsername(), message.getInstanceName());
-      databaseManagers.put(message.getUsername(), databaseManager);
-      databaseManager.tell(new DatabaseManager.Refresh(message.getUsername()), getSelf());
-    } else {
-      if(message.isImmediate()) {
-        databaseManager.tell(new DatabaseManager.Refresh(message.getUsername(), false), getSelf());
-      }
-      cancelTerminationScheduler(message.getUsername());
-    }
-    scheduleTermination(message.getUsername());
-  }
-
-  private void handleTerminate(Terminate message) {
-    ActorRef databaseManager = databaseManagers.remove(message.username);
-    getContext().stop(databaseManager);
-    cancelTerminationScheduler(message.getUsername());
-  }
-
-  private void handleGetDatabases(DatabaseManager.GetDatabases message) {
-    String username = message.getUsername();
-    ActorRef databaseManager = databaseManagers.get(username);
-    if(databaseManager != null) {
-      databaseManager.tell(message, getSender());
-    } else {
-      // Not database Manager created. Start the database manager with a ping message
-      // and queue up the GetDatabases call to self
-      getSelf().tell(new Ping(username, context.getInstanceName()), getSender());
-      getSelf().tell(message, getSender());
-    }
-  }
-
-  private void cancelTerminationScheduler(String username) {
-    Cancellable cancellable = terminationSchedulers.remove(username);
-    if (!(cancellable == null || cancellable.isCancelled())) {
-      LOG.info("Cancelling termination scheduler");
-      cancellable.cancel();
-    }
-  }
-
-  private void scheduleTermination(String username) {
-    Cancellable cancellable = context().system().scheduler().scheduleOnce(Duration.create(2, TimeUnit.MINUTES),
-        getSelf(), new Terminate(username), getContext().dispatcher(), getSelf());
-    terminationSchedulers.put(username, cancellable);
-  }
-
-  private ActorRef createDatabaseManager(String username, String instanceName) {
-    LOG.info("Creating database manager for username: {}, instance: {}", username, instanceName);
-    return context().actorOf(DatabaseManager.props(context));
-  }
-
-  public static Props props(ViewContext viewContext) {
-    return Props.create(MetaDataManager.class, viewContext);
-  }
-
-  private class Terminate {
-    public final String username;
-
-    public Terminate(String username) {
-      this.username = username;
-    }
-
-    public String getUsername() {
-      return username;
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java
deleted file mode 100644
index 64cd69c..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.Props;
-import com.google.common.base.Optional;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.internal.Connectable;
-import org.apache.ambari.view.hive20.internal.ConnectionException;
-import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
-import org.apache.ambari.view.hive20.internal.dto.TableInfo;
-import org.apache.hive.jdbc.HiveConnection;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.HashSet;
-import java.util.Set;
-
-/**
- *
- */
-public class MetaDataRetriever extends HiveActor {
-
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  private final Connectable connectable;
-
-  public MetaDataRetriever(Connectable connectable) {
-    this.connectable = connectable;
-  }
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-    Object message = hiveMessage.getMessage();
-    if (message instanceof RefreshDB) {
-      handleRefreshDB();
-    }
-  }
-
-  private void handleRefreshDB() {
-    try {
-      refreshDatabaseInfos();
-    } catch (ConnectionException | SQLException e) {
-      LOG.error("Failed to update the complete database information. Exception: {}", e);
-      getSender().tell(new DBRefreshFailed(e), getSelf());
-    }
-  }
-
-  private HiveConnection getHiveConnection() throws ConnectionException {
-    if (!connectable.isOpen()) {
-      connectable.connect();
-    }
-    Optional<HiveConnection> connectionOptional = connectable.getConnection();
-    return connectionOptional.get();
-  }
-
-  private void refreshDatabaseInfos() throws ConnectionException, SQLException {
-    HiveConnection connection = getHiveConnection();
-    Set<DatabaseInfo> infos = new HashSet<>();
-    try (ResultSet schemas = connection.getMetaData().getSchemas()) {
-      while (schemas.next()) {
-        DatabaseInfo info = new DatabaseInfo(schemas.getString(1));
-        infos.add(info);
-      }
-    }
-
-    getSender().tell(new DBRefreshed(infos), getSelf());
-
-    for (DatabaseInfo info : infos) {
-      refreshTablesInfo(info.getName());
-    }
-  }
-
-  private void refreshTablesInfo(String database) throws ConnectionException, SQLException {
-    HiveConnection connection = getHiveConnection();
-    Set<String> currentTableNames = new HashSet<>();
-    try (ResultSet tables = connection.getMetaData().getTables("", database, null, null)) {
-      while (tables.next()) {
-        TableInfo info = new TableInfo(tables.getString(3), tables.getString(4));
-        currentTableNames.add(info.getName());
-        getSender().tell(new TableRefreshed(info, database), getSelf());
-      }
-    }
-    getSender().tell(new AllTableRefreshed(database, currentTableNames), getSelf());
-  }
-
-  public static  Props props(Connectable connectable) {
-    return Props.create(MetaDataRetriever.class, connectable);
-  }
-
-
-  public static class RefreshDB {
-
-  }
-
-  public static class DBRefreshed {
-    private final Set<DatabaseInfo> databases;
-
-    public DBRefreshed(Set<DatabaseInfo> databases) {
-      this.databases = databases;
-    }
-
-    public Set<DatabaseInfo> getDatabases() {
-      return databases;
-    }
-  }
-
-  public static class DBRefreshFailed {
-    private final Exception exception;
-
-    public DBRefreshFailed(Exception exception) {
-      this.exception = exception;
-    }
-
-    public Exception getException() {
-      return exception;
-    }
-  }
-
-  public static  class TableRefreshed {
-    private final TableInfo table;
-    private final String database;
-
-    public TableRefreshed(TableInfo table, String database) {
-      this.table = table;
-      this.database = database;
-    }
-
-    public TableInfo getTable() {
-      return table;
-    }
-
-    public String getDatabase() {
-      return database;
-    }
-  }
-
-  public static class AllTableRefreshed {
-    private final String database;
-    private final Set<String> currentTableNames;
-
-    public AllTableRefreshed(String database, Set<String> currentTableNames) {
-      this.database = database;
-      this.currentTableNames = currentTableNames;
-    }
-
-    public String getDatabase() {
-      return database;
-    }
-
-    public Set<String> getCurrentTableNames() {
-      return currentTableNames;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java
deleted file mode 100644
index 0581618..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor;
-
-import akka.actor.Props;
-import org.apache.ambari.view.hive20.actor.message.HiveMessage;
-import org.apache.ambari.view.hive20.internal.dto.TableInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- *
- */
-public class TableChangeNotifier extends HiveActor {
-  private final Logger LOG = LoggerFactory.getLogger(getClass());
-
-  @Override
-  public void handleMessage(HiveMessage hiveMessage) {
-    Object message = hiveMessage.getMessage();
-    if(message instanceof TableUpdated) {
-      handleTableUpdated((TableUpdated) message);
-    } else if(message instanceof TableAdded) {
-      handleTableAdded((TableAdded) message);
-    } else if(message instanceof TableRemoved) {
-      handleTableRemoved((TableRemoved) message);
-    }
-  }
-
-  private void handleTableUpdated(TableUpdated message) {
-    LOG.info("Tables updated for table name: {}", message.getTableInfo().getName());
-  }
-
-  private void handleTableAdded(TableAdded message) {
-    LOG.info("Tables added for table name: {}", message.getTableInfo().getName());
-  }
-
-  private void handleTableRemoved(TableRemoved message) {
-    LOG.info("Tables removed for table name: {}", message.getTableName());
-  }
-
-  public static Props props() {
-    return Props.create(TableChangeNotifier.class);
-  }
-
-
-  public static class TableAdded {
-    private final TableInfo tableInfo;
-    public TableAdded(TableInfo tableInfo) {
-      this.tableInfo = tableInfo;
-    }
-
-    public TableInfo getTableInfo() {
-      return tableInfo;
-    }
-  }
-
-  public static class TableRemoved {
-    private final String tableName;
-    public TableRemoved(String tableName) {
-      this.tableName = tableName;
-    }
-
-    public String getTableName() {
-      return tableName;
-    }
-  }
-
-
-  public static class TableUpdated {
-    private final TableInfo tableInfo;
-    public TableUpdated(TableInfo tableInfo) {
-      this.tableInfo = tableInfo;
-    }
-
-    public TableInfo getTableInfo() {
-      return tableInfo;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java
deleted file mode 100644
index 61df87a..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.actor.message;
-
-/**
- * Ping message
- */
-public class Ping {
-  private final String username;
-  private final String instanceName;
-  private final boolean immediate;
-
-  public Ping(String username, String instanceName) {
-    this(username, instanceName, false);
-  }
-
-  public Ping(String username, String instanceName, boolean immediate) {
-    this.username = username;
-    this.instanceName = instanceName;
-    this.immediate = immediate;
-  }
-
-  public String getUsername() {
-    return username;
-  }
-
-  public String getInstanceName() {
-    return instanceName;
-  }
-
-  public boolean isImmediate() {
-    return immediate;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
index ffa9e132..baa82b4 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
@@ -20,11 +20,14 @@ package org.apache.ambari.view.hive20.client;
 
 import java.util.List;
 
+import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
+
 public interface DDLDelegator {
 
-  List<String> getDbList(ConnectionConfig config, String like);
+  List<DatabaseInfo> getDbList(ConnectionConfig config, String like);
 
-  List<String> getTableList(ConnectionConfig config, String database, String like);
+  List<TableInfo> getTableList(ConnectionConfig config, String database, String like);
 
   List<Row> getTableDescriptionFormatted(ConnectionConfig config, String database, String table);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
index 76c7c03..ef4f100 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
@@ -18,15 +18,11 @@
 
 package org.apache.ambari.view.hive20.client;
 
-import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
-import akka.actor.Inbox;
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.base.Optional;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import javax.annotation.Nullable;
+
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive20.actor.message.Connect;
 import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
@@ -40,15 +36,23 @@ import org.apache.ambari.view.hive20.actor.message.job.NoMoreItems;
 import org.apache.ambari.view.hive20.actor.message.job.NoResult;
 import org.apache.ambari.view.hive20.actor.message.job.Result;
 import org.apache.ambari.view.hive20.actor.message.job.ResultSetHolder;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
 import org.apache.ambari.view.hive20.utils.HiveActorConfiguration;
 import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.Duration;
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import scala.concurrent.duration.Duration;
 
 public class DDLDelegatorImpl implements DDLDelegator {
 
@@ -69,15 +73,29 @@ public class DDLDelegatorImpl implements DDLDelegator {
   }
 
   @Override
-  public List<String> getDbList(ConnectionConfig config, String like) {
+  public List<DatabaseInfo> getDbList(ConnectionConfig config, String like) {
     Optional<Result> rowsFromDB = getRowsFromDB(config, getDatabaseListStatements(like));
-    return rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+    List<String> databaseNames = rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+    return FluentIterable.from(databaseNames).transform(new Function<String, DatabaseInfo>() {
+      @Nullable
+      @Override
+      public DatabaseInfo apply(@Nullable String databaseName) {
+        return new DatabaseInfo(databaseName);
+      }
+    }).toList();
   }
 
   @Override
-  public List<String> getTableList(ConnectionConfig config, String database, String like) {
+  public List<TableInfo> getTableList(ConnectionConfig config, String database, String like) {
     Optional<Result> rowsFromDB = getRowsFromDB(config, getTableListStatements(database, like));
-    return rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+    List<String> tableNames = rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+    return FluentIterable.from(tableNames).transform(new Function<String, TableInfo>() {
+      @Nullable
+      @Override
+      public TableInfo apply(@Nullable String tableName) {
+        return new TableInfo(tableName);
+      }
+    }).toList();
   }
 
   @Override
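
For readers skimming the hunk above: the Guava FluentIterable/Function blocks
only wrap each name returned from Hive into a DTO. An equivalent Java 8 form is
shown below purely for illustration (not what the commit uses; the codebase here
keeps the Guava-style transforms):

    // Wrap each database name in a DatabaseInfo; TableInfo is handled the same way.
    List<DatabaseInfo> databases = databaseNames.stream()
        .map(DatabaseInfo::new)                 // DatabaseInfo has a single-String constructor
        .collect(java.util.stream.Collectors.toList());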

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
index 41be0a0..de282a7 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
@@ -26,11 +26,9 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
  */
 public class TableInfo {
   private String name;
-  private String type;
 
-  public TableInfo(String name, String type) {
+  public TableInfo(String name) {
     this.name = name;
-    this.type = type;
   }
 
   public String getName() {
@@ -41,13 +39,6 @@ public class TableInfo {
     this.name = name;
   }
 
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
 
   @Override
   public boolean equals(Object o) {
@@ -73,7 +64,6 @@ public class TableInfo {
   public String toString() {
     return "TableInfo{" +
         "name='" + name + '\'' +
-        ", type='" + type + '\'' +
         '}';
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
index 09e1ea9..47acc01 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
@@ -24,7 +24,6 @@ package org.apache.ambari.view.hive20.internal.dto;
 public class TableResponse {
   private String id;
   private String name;
-  private String type;
   private String databaseId;
 
   public String getId() {
@@ -43,14 +42,6 @@ public class TableResponse {
     this.name = name;
   }
 
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
-
   public String getDatabaseId() {
     return databaseId;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
index f5ecdee..e433dc4 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
@@ -18,17 +18,14 @@
 
 package org.apache.ambari.view.hive20.resources.browser;
 
-import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
-import akka.actor.Inbox;
 import com.google.common.base.Function;
 import com.google.common.base.Optional;
 import com.google.common.base.Predicate;
 import com.google.common.base.Strings;
 import com.google.common.collect.FluentIterable;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.ConnectionFactory;
 import org.apache.ambari.view.hive20.ConnectionSystem;
-import org.apache.ambari.view.hive20.actor.DatabaseManager;
 import org.apache.ambari.view.hive20.client.ConnectionConfig;
 import org.apache.ambari.view.hive20.client.DDLDelegator;
 import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
@@ -56,10 +53,8 @@ import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
-import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.Duration;
 
 import javax.annotation.Nullable;
 import javax.inject.Inject;
@@ -68,7 +63,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.TimeUnit;
 
 /**
  *
@@ -93,21 +87,33 @@ public class DDLProxy {
   }
 
   public DatabaseResponse getDatabase(final String databaseId) {
-    Optional<DatabaseInfo> infoOptional = selectDatabase(databaseId);
-    if (!infoOptional.isPresent()) {
-      // Throw exception
-    }
+    DatabaseInfo dbInfo = new DatabaseInfo(databaseId);
+    List<TableInfo> tables = getTableInfos(databaseId);
+    dbInfo.setTables(new HashSet<>(tables));
 
-    return transformToDatabaseResponse(infoOptional.get());
+    return transformToDatabaseResponse(dbInfo);
   }
 
   public Set<TableResponse> getTables(final String databaseId) {
-    Optional<DatabaseInfo> infoOptional = selectDatabase(databaseId);
-    if (!infoOptional.isPresent()) {
-      // Throw exception;
-    }
-    DatabaseInfo info = infoOptional.get();
-    return transformToTablesResponse(info.getTables(), info.getName());
+    List<TableInfo> tables = getTableInfos(databaseId);
+
+    return FluentIterable.from(tables).transform(new Function<TableInfo, TableResponse>() {
+      @Nullable
+      @Override
+      public TableResponse apply(@Nullable TableInfo tableInfo) {
+        TableResponse response = new TableResponse();
+        response.setDatabaseId(databaseId);
+        response.setId(databaseId + "/" + tableInfo.getName());
+        response.setName(tableInfo.getName());
+        return response;
+      }
+    }).toSet();
+  }
+
+  private List<TableInfo> getTableInfos(String databaseId) {
+    ConnectionConfig hiveConnectionConfig = ConnectionFactory.create(context);
+    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+    return delegator.getTableList(hiveConnectionConfig, databaseId, "*");
   }
 
   public TableResponse getTable(final String databaseName, final String tableName) {
@@ -190,7 +196,6 @@ public class DDLProxy {
     TableResponse response = new TableResponse();
     response.setId(databaseName + "/" + tableInfo.getName());
     response.setName(tableInfo.getName());
-    response.setType(tableInfo.getType());
     response.setDatabaseId(databaseName);
     return response;
   }
@@ -205,26 +210,10 @@ public class DDLProxy {
   }
 
   private Set<DatabaseInfo> getDatabaseInfos() {
-    ActorRef metaDataManager = ConnectionSystem.getInstance().getMetaDataManager(context);
-    ActorSystem system = ConnectionSystem.getInstance().getActorSystem();
-
-    Inbox inbox = Inbox.create(system);
-
-    inbox.send(metaDataManager, new DatabaseManager.GetDatabases(context.getUsername()));
-    Object receive;
-    try {
-      receive = inbox.receive(Duration.create(60 * 1000, TimeUnit.MILLISECONDS));
-    } catch (Throwable ex) {
-      String errorMessage = "Query timed out to fetch databases information for user: " + context.getUsername();
-      LOG.error(errorMessage, ex);
-      throw new ServiceFormattedException(errorMessage, ex);
-    }
-    Set<DatabaseInfo> infos = new HashSet<>();
-
-    if (receive instanceof DatabaseManager.DatabasesResult) {
-      infos = ((DatabaseManager.DatabasesResult) receive).getDatabases();
-    }
-    return infos;
+    ConnectionConfig hiveConnectionConfig = ConnectionFactory.create(context);
+    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+    List<DatabaseInfo> databases = delegator.getDbList(hiveConnectionConfig, "*");
+    return new HashSet<>(databases);
   }
 
   public String generateCreateTableDDL(String databaseName, TableMeta tableMeta) throws ServiceException {

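Note: with the DatabaseManager-backed cache gone, DDLProxy resolves databases and tables on demand through DDLDelegatorImpl (see getDatabaseInfos/getTableInfos above). Below is a minimal sketch of that call path, assuming only the hive20 classes touched by this patch; import paths are inferred from the module layout and the wrapper class name is hypothetical.

    import java.util.List;

    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.hive20.ConnectionFactory;
    import org.apache.ambari.view.hive20.ConnectionSystem;
    import org.apache.ambari.view.hive20.client.ConnectionConfig;
    import org.apache.ambari.view.hive20.client.DDLDelegator;
    import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
    import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
    import org.apache.ambari.view.hive20.internal.dto.TableInfo;

    // Hypothetical helper; mirrors the delegator usage introduced in DDLProxy above.
    public class DdlLookupSketch {
      public static void listAll(ViewContext context) {
        ConnectionConfig config = ConnectionFactory.create(context);
        ConnectionSystem system = ConnectionSystem.getInstance();
        DDLDelegator delegator = new DDLDelegatorImpl(context,
            system.getActorSystem(), system.getOperationController(context));

        // Every call now goes straight to HiveServer2 instead of the removed actor cache.
        List<DatabaseInfo> databases = delegator.getDbList(config, "*");
        for (DatabaseInfo database : databases) {
          List<TableInfo> tables = delegator.getTableList(config, database.getName(), "*");
          System.out.println(database.getName() + ": " + tables.size() + " table(s)");
        }
      }
    }

The trade-off is a HiveServer2 round trip on each request in place of reads from the actor-maintained cache that this patch removes.
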
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
deleted file mode 100644
index 274ea20..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.resources.browser;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.ViewResourceHandler;
-import org.apache.ambari.view.hive20.BaseService;
-import org.apache.ambari.view.hive20.ConnectionSystem;
-import org.apache.ambari.view.hive20.client.ColumnDescription;
-import org.apache.ambari.view.hive20.client.ConnectionConfig;
-import org.apache.ambari.view.hive20.client.Cursor;
-import org.apache.ambari.view.hive20.client.DDLDelegator;
-import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
-import org.apache.ambari.view.hive20.client.Row;
-import org.apache.ambari.view.hive20.resources.jobs.ResultsPaginationController;
-import org.apache.ambari.view.hive20.utils.BadRequestFormattedException;
-import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.inject.Inject;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.util.List;
-import java.util.concurrent.Callable;
-
-/**
- * Database access resource
- */
-public class HiveBrowserService extends BaseService {
-  @Inject
-  ViewResourceHandler handler;
-  @Inject
-  protected ViewContext context;
-
-  protected final static Logger LOG =
-    LoggerFactory.getLogger(HiveBrowserService.class);
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response databases(@QueryParam("like") String like,
-                            @QueryParam("first") String fromBeginning,
-                            @QueryParam("count") Integer count,
-                            @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    JSONObject response = new JSONObject();
-    ConnectionConfig hiveConnectionConfig = getHiveConnectionConfig();
-    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-    List<String> databases = delegator.getDbList(hiveConnectionConfig, like);
-    response.put("databases", databases);
-
-    return Response.ok(response).build();
-
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database.page")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response databasesPaginated(@QueryParam("like") String like,
-                                     @QueryParam("first") String fromBeginning,
-                                     @QueryParam("count") Integer count,
-                                     @QueryParam("searchId") String searchId,
-                                     @QueryParam("format") String format,
-                                     @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    String curl = null;
-    try {
-      final String finalLike = like;
-      final DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-      return ResultsPaginationController.getInstance(context)
-          .request("databases", searchId, false, fromBeginning, count, format, requestedColumns,
-            new Callable<Cursor<Row, ColumnDescription>>() {
-              @Override
-              public Cursor<Row, ColumnDescription> call() throws Exception {
-                return delegator.getDbListCursor(getHiveConnectionConfig(), finalLike);
-              }
-            }).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response tablesInDatabase(@PathParam("db") String db,
-                                   @QueryParam("like") String like,
-                                   @QueryParam("first") String fromBeginning,
-                                   @QueryParam("count") Integer count,
-                                   @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-
-    JSONObject response = new JSONObject();
-    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-    List<String> tables = delegator.getTableList(getHiveConnectionConfig(), db, like);
-    response.put("tables", tables);
-    response.put("database", db);
-    return Response.ok(response).build();
-
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table.page")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response tablesInDatabasePaginated(@PathParam("db") final String db,
-                                            @QueryParam("like") String like,
-                                            @QueryParam("first") String fromBeginning,
-                                            @QueryParam("count") Integer count,
-                                            @QueryParam("searchId") String searchId,
-                                            @QueryParam("format") String format,
-                                            @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = "*";
-    else
-      like = "*" + like + "*";
-    String curl = null;
-    try {
-      final String finalLike = like;
-      final DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-      try {
-        return ResultsPaginationController.getInstance(context)
-          .request(db + ":tables:", searchId, false, fromBeginning, count, format, requestedColumns,
-            new Callable<Cursor<Row, ColumnDescription>>() {
-              @Override
-              public Cursor<Row, ColumnDescription> call() throws Exception {
-                return delegator.getTableListCursor(getHiveConnectionConfig(), db, finalLike);
-              }
-            }).build();
-      } catch (Exception ex) {
-        throw new ServiceFormattedException(ex.getMessage(), ex);
-      }
-
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table/{table}")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response describeTable(@PathParam("db") String db,
-                                @PathParam("table") String table,
-                                @QueryParam("like") String like,
-                                @QueryParam("columns") String requestedColumns,
-                                @QueryParam("extended") String extended) {
-    boolean extendedTableDescription = (extended != null && extended.equals("true"));
-    String curl = null;
-    try {
-      JSONObject response = new JSONObject();
-      DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-      List<ColumnDescription> descriptions = delegator.getTableDescription(getHiveConnectionConfig(), db, table, "%", extendedTableDescription);
-      response.put("columns", descriptions);
-      response.put("database", db);
-      response.put("table", table);
-
-      //TODO: New implementation
-
-      return Response.ok(response).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (IllegalArgumentException ex) {
-      throw new BadRequestFormattedException(ex.getMessage(), ex);
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
-    }
-  }
-
-  /**
-   * Returns list of databases
-   */
-  @GET
-  @Path("database/{db}/table/{table}.page")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response describeTablePaginated(@PathParam("db") final String db,
-                                         @PathParam("table") final String table,
-                                         @QueryParam("like") String like,
-                                         @QueryParam("first") String fromBeginning,
-                                         @QueryParam("searchId") String searchId,
-                                         @QueryParam("count") Integer count,
-                                         @QueryParam("format") String format,
-                                         @QueryParam("columns") final String requestedColumns) {
-    if (like == null)
-      like = ".*";
-    else
-      like = ".*" + like + ".*";
-    final String finalLike = like;
-
-    final DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
-    try {
-      return ResultsPaginationController.getInstance(context)
-        .request(db + ":tables:" + table + ":columns", searchId, false, fromBeginning, count, format, requestedColumns,
-          new Callable<Cursor<Row, ColumnDescription>>() {
-            @Override
-            public Cursor<Row, ColumnDescription> call() throws Exception {
-              return delegator.getTableDescriptionCursor(getHiveConnectionConfig(), db, table, finalLike, false);
-            }
-          }).build();
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
-  }
-
-}

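Note: the database and table listing endpoints deleted above wrapped their ?like= parameter as "*<like>*" before delegating; with the service gone, that filtering sits directly on DDLDelegator's pattern argument, which DDLProxy now calls with "*". A small sketch, assuming the pattern parameter keeps the semantics the removed endpoints relied on; the helper class is hypothetical.

    import java.util.List;

    import org.apache.ambari.view.hive20.client.ConnectionConfig;
    import org.apache.ambari.view.hive20.client.DDLDelegator;
    import org.apache.ambari.view.hive20.internal.dto.TableInfo;

    // Hypothetical helper reproducing the old ?like= behaviour on top of the delegator.
    class TableFilterSketch {
      static List<TableInfo> tablesLike(DDLDelegator delegator, ConnectionConfig config,
                                        String database, String fragment) {
        // The removed endpoints built the pattern as "*" + like + "*".
        return delegator.getTableList(config, database, "*" + fragment + "*");
      }
    }

The paginated variants that wrapped ResultsPaginationController go away with this file as well.
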
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
index 1399ee4..dd5bb06 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
@@ -18,20 +18,16 @@
 
 package org.apache.ambari.view.hive20.resources.system;
 
-import akka.actor.ActorRef;
-import org.apache.ambari.view.hive20.BaseService;
-import org.apache.ambari.view.hive20.ConnectionSystem;
-import org.apache.ambari.view.hive20.actor.message.Ping;
-import org.apache.ambari.view.hive20.resources.system.ranger.RangerService;
-import org.json.simple.JSONObject;
-
+import java.util.List;
 import javax.inject.Inject;
 import javax.ws.rs.GET;
-import javax.ws.rs.POST;
 import javax.ws.rs.Path;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Response;
-import java.util.List;
+
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.resources.system.ranger.RangerService;
+import org.json.simple.JSONObject;
 
 /**
  * System services which are required for the working of the application
@@ -45,20 +41,6 @@ public class SystemService extends BaseService {
     this.rangerService = rangerService;
   }
 
-  /**
-   * Clients should sent pings to the server at regular interval so that the system could keep alive stuffs or do
-   * cleanup work when the pings stops
-   * @return No content
-   */
-  @POST
-  @Path("ping")
-  public Response ping() {
-    //TODO: Change this to EventBus implementation
-    ActorRef metaDataManager = ConnectionSystem.getInstance().getMetaDataManager(context);
-    metaDataManager.tell(new Ping(context.getUsername(), context.getInstanceName()), ActorRef.noSender());
-    return Response.ok().status(Response.Status.NO_CONTENT).build();
-  }
-
 
   /**
    * Returns if the current user is a cluster operator or ambari administrator

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MetaDataManagerEventSubmitter.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MetaDataManagerEventSubmitter.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MetaDataManagerEventSubmitter.java
deleted file mode 100644
index b23e06e..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MetaDataManagerEventSubmitter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.utils;
-
-import akka.actor.ActorRef;
-import com.google.common.base.Optional;
-import org.apache.ambari.view.hive20.ConnectionSystem;
-import org.apache.ambari.view.hive20.actor.message.Ping;
-
-/**
- * Static class to submit event to the MetaData Manager.
- */
-public final class MetaDataManagerEventSubmitter {
-
-  /**
-   * Send a ping message to the MetaDataManager Actor for that instance
-   * @param username Logged-in username
-   * @param instanceName current instance name
-   */
-  public static void sendDBRefresh(String username, String instanceName) {
-    Optional<ActorRef> metaDataManagerOptional = ConnectionSystem.getInstance().getMetaDataManagerIfPresent(instanceName);
-    if(metaDataManagerOptional.isPresent()) {
-      ActorRef metaDataManager = metaDataManagerOptional.get();
-      metaDataManager.tell(new Ping(username, instanceName, true), ActorRef.noSender());
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js
deleted file mode 100644
index f88cfed..0000000
--- a/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import ApplicationAdapter from './application';
-
-export default ApplicationAdapter.extend({
-  ping() {
-    const url = this.urlForCreateRecord('ping');
-    return this.ajax(url, 'POST');
-  },
-
-  pathForType() {
-    return "system/ping";
-  },
-
-  fetchAuth(databaseName, tableName) {
-    const url = this.buildURL() + '/system/ranger/auth';
-    return this.ajax(url, "GET", {data: {database: databaseName, table: tableName}});
-  }
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/adapters/ranger.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/ranger.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/ranger.js
new file mode 100644
index 0000000..92b6472
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/ranger.js
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import ApplicationAdapter from './application';
+
+export default ApplicationAdapter.extend({
+
+  fetchAuth(databaseName, tableName) {
+    const url = this.buildURL() + '/system/ranger/auth';
+    return this.ajax(url, "GET", {data: {database: databaseName, table: tableName}});
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/models/table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/table.js b/contrib/views/hive20/src/main/resources/ui/app/models/table.js
index 3fdd21a..90400fe 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/table.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/table.js
@@ -20,14 +20,7 @@ import DS from 'ember-data';
 
 export default DS.Model.extend({
   name: DS.attr('string'),
-  type: DS.attr('string'),
   database: DS.belongsTo('database'),
   selected: false,
-  icon: Ember.computed('type', function() {
-    if(this.get('type').toLowerCase() === 'view') {
-      return "eye";
-    } else {
-      return "table";
-    }
-  })
+  icon: "table"
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/application.js b/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
index f4ceeb9..448fad2 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
@@ -21,12 +21,7 @@ import tabs from '../configs/top-level-tabs';
 import ENV from 'ui/config/environment';
 
 export default Ember.Route.extend({
-  keepAlive: Ember.inject.service('keep-alive'),
   serviceCheck: Ember.inject.service(),
-  init: function () {
-    this._super(...arguments);
-    this.get('keepAlive').initialize();
-  },
 
   beforeModel() {
     if (ENV.APP.SHOULD_PERFORM_SERVICE_CHECK && !this.get('serviceCheck.checkCompleted')) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js
index ec9d1a2..41b16be 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/auth.js
@@ -22,6 +22,6 @@ export default TableMetaRouter.extend({
   model(params, transition) {
     let databaseName = transition.params['databases.database']['databaseId'];
     let tableName = transition.params['databases.database.tables.table']['name'];
-    return this.store.adapterFor('ping').fetchAuth(databaseName, tableName);
+    return this.store.adapterFor('ranger').fetchAuth(databaseName, tableName);
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/serializers/database.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/serializers/database.js b/contrib/views/hive20/src/main/resources/ui/app/serializers/database.js
new file mode 100644
index 0000000..99a5ef3
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/serializers/database.js
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.RESTSerializer.extend(DS.EmbeddedRecordsMixin, {
+  attrs: {
+    tables: {embedded: 'always'}
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/serializers/table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/serializers/table.js b/contrib/views/hive20/src/main/resources/ui/app/serializers/table.js
new file mode 100644
index 0000000..0b1aecd
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/serializers/table.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.RESTSerializer.extend(DS.EmbeddedRecordsMixin, {
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js b/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js
deleted file mode 100644
index 6bb12fb..0000000
--- a/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import Ember from 'ember';
-
-export default Ember.Service.extend({
-  store: Ember.inject.service(),
-  initialize: function() {
-    this.schedulePing();
-  },
-
-  schedulePing() {
-    this.get('store').adapterFor('ping').ping();
-    Ember.run.later(this.schedulePing.bind(this), 60000);
-  }
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e76e47c/contrib/views/hive20/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/view.xml b/contrib/views/hive20/src/main/resources/view.xml
index 0dd3875..37ddf56 100644
--- a/contrib/views/hive20/src/main/resources/view.xml
+++ b/contrib/views/hive20/src/main/resources/view.xml
@@ -328,11 +328,6 @@
         <service-class>org.apache.ambari.view.hive20.resources.files.FileService</service-class>
     </resource>
 
-    <!--<resource>
-        <name>ddl</name>
-        <service-class>org.apache.ambari.view.hive2.resources.browser.HiveBrowserService</service-class>
-    </resource>-->
-
     <resource>
         <name>hive</name>
         <service-class>org.apache.ambari.view.hive20.HelpService</service-class>