Posted to commits@ambari.apache.org by rl...@apache.org on 2015/05/28 18:54:35 UTC

[2/2] ambari git commit: AMBARI-11474. Hive View Should Support NameNode HA & Cluster association (Erik Bergenholtz via rlevas)

AMBARI-11474. Hive View Should Support NameNode HA & Cluster association (Erik Bergenholtz via rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b0634464
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b0634464
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b0634464

Branch: refs/heads/trunk
Commit: b0634464e625fe80fbe693905a46d2555088afec
Parents: b04fb76
Author: Erik Bergenholtz <eb...@hortonworks.com>
Authored: Thu May 28 12:54:20 2015 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Thu May 28 12:54:29 2015 -0400

----------------------------------------------------------------------
 contrib/views/hive/pom.xml                      |  11 +
 .../ambari/view/hive/PropertyValidator.java     |  69 ++--
 .../backgroundjobs/BackgroundJobController.java |  12 +-
 .../ambari/view/hive/client/Connection.java     |   3 +-
 .../view/hive/client/ConnectionFactory.java     |  21 +
 .../resources/PersonalCRUDResourceManager.java  |   3 +-
 .../view/hive/resources/files/FileService.java  |   4 +-
 .../resources/jobs/JobResourceProvider.java     |   5 +-
 .../view/hive/resources/jobs/JobService.java    |   4 +-
 .../view/hive/resources/jobs/LogParser.java     |   3 +-
 .../jobs/ResultsPaginationController.java       |   6 +-
 .../hive/resources/jobs/atsJobs/ATSParser.java  |   3 +
 .../jobs/atsJobs/ATSRequestsDelegateImpl.java   |  12 +-
 .../view/hive/resources/jobs/rm/RMParser.java   |  51 ++-
 .../hive/resources/jobs/rm/RMParserFactory.java |  54 ++-
 .../jobs/rm/RMRequestsDelegateImpl.java         |   1 +
 .../jobs/viewJobs/JobControllerImpl.java        |  25 +-
 .../resources/FileResourceResourceProvider.java |   5 +-
 .../savedQueries/SavedQueryResourceManager.java |  27 +-
 .../SavedQueryResourceProvider.java             |   5 +-
 .../savedQueries/SavedQueryService.java         |   6 +-
 .../resources/udfs/UDFResourceProvider.java     |   5 +-
 .../ambari/view/hive/utils/FilePaginator.java   |   2 +-
 .../apache/ambari/view/hive/utils/HdfsApi.java  | 380 -------------------
 .../apache/ambari/view/hive/utils/HdfsUtil.java | 103 -----
 .../hive/utils/ServiceFormattedException.java   |  17 +-
 .../view/hive/utils/SharedObjectsFactory.java   |  19 +-
 .../ui/hive-web/app/adapters/application.js     |   3 +-
 .../app/components/column-filter-widget.js      |   2 +-
 .../hive-web/app/components/progress-widget.js  |   2 +-
 .../ui/hive-web/app/controllers/history.js      |  21 +-
 .../ui/hive-web/app/controllers/index.js        |   9 +-
 .../app/controllers/index/history-query/logs.js |  15 +-
 .../controllers/index/history-query/results.js  |   2 +-
 .../ui/hive-web/app/controllers/job-progress.js |   6 +-
 .../ui/hive-web/app/initializers/i18n.js        |   1 +
 .../ui/hive-web/app/mixins/filterable.js        |   4 +
 .../resources/ui/hive-web/app/routes/history.js |  10 +-
 .../resources/ui/hive-web/app/routes/queries.js |   8 +-
 .../resources/ui/hive-web/app/routes/udfs.js    |   4 +-
 .../resources/ui/hive-web/app/styles/app.scss   |  15 +-
 .../ui/hive-web/app/templates/history.hbs       |   9 +-
 .../ui/hive-web/app/utils/constants.js          |   4 +-
 .../ui/hive-web/app/views/visual-explain.js     |   2 +-
 .../resources/ui/hive-web/config/environment.js |   5 +
 contrib/views/hive/src/main/resources/view.xml  | 128 +++++--
 .../apache/ambari/view/hive/BaseHiveTest.java   |   2 +
 .../hive/resources/files/FileServiceTest.java   |   2 +-
 .../hive/resources/jobs/AggregatorTest.java     |   3 +-
 .../hive/resources/jobs/JobServiceTest.java     |   9 +-
 .../SavedQueryResourceManagerTest.java          |  55 +++
 .../savedQueries/SavedQueryServiceTest.java     |   2 -
 .../ambari/view/hive/utils/HdfsApiMock.java     |   6 +-
 53 files changed, 555 insertions(+), 630 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/pom.xml b/contrib/views/hive/pom.xml
index da7e1a1..9638e96 100644
--- a/contrib/views/hive/pom.xml
+++ b/contrib/views/hive/pom.xml
@@ -61,6 +61,7 @@
     <dependency>
       <groupId>com.googlecode.json-simple</groupId>
       <artifactId>json-simple</artifactId>
+      <version>1.1.1</version>
     </dependency>
     <dependency>
       <groupId>commons-configuration</groupId>
@@ -183,6 +184,16 @@
       <version>${hadoop-version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.ambari.contrib.views</groupId>
+      <artifactId>ambari-views-utils</artifactId>
+      <version>0.0.1-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-validator</groupId>
+      <artifactId>commons-validator</artifactId>
+      <version>1.4.0</version>
+    </dependency>
   </dependencies>
 
   <properties>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/PropertyValidator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/PropertyValidator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/PropertyValidator.java
index 61efa49..e1784a7 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/PropertyValidator.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/PropertyValidator.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive;
 import org.apache.ambari.view.ViewInstanceDefinition;
 import org.apache.ambari.view.validation.ValidationResult;
 import org.apache.ambari.view.validation.Validator;
+import org.apache.commons.validator.routines.UrlValidator;
 
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -39,51 +40,65 @@ public class PropertyValidator implements Validator {
 
   @Override
   public ValidationResult validateProperty(String property, ViewInstanceDefinition viewInstanceDefinition, ValidationContext validationContext) {
+    // Validate non cluster associated properties
+    if (property.equals(HIVE_AUTH)) {
+      String auth = viewInstanceDefinition.getPropertyMap().get(HIVE_AUTH);
+
+      if (auth != null && !auth.isEmpty()) {
+        for(String param : auth.split(";")) {
+          String[] keyvalue = param.split("=");
+          if (keyvalue.length != 2) {
+            return new InvalidPropertyValidationResult(false, "Can not parse authentication param " + param + " in " + auth);
+          }
+        }
+      }
+    }
+
+    // if associated with cluster, no need to validate associated properties
+    String cluster = viewInstanceDefinition.getClusterHandle();
+    if (cluster != null) {
+      return ValidationResult.SUCCESS;
+    }
+
+    // Cluster associated properties
     if (property.equals(WEBHDFS_URL)) {
       String webhdfsUrl = viewInstanceDefinition.getPropertyMap().get(WEBHDFS_URL);
-      if (validateURL(webhdfsUrl)) return new InvalidPropertyValidationResult(false, "Must be valid URL");
+      if (!validateURL(webhdfsUrl)) {
+        return new InvalidPropertyValidationResult(false, "Must be valid URL");
+      }
     }
 
     if (property.equals(HIVE_PORT)) {
       String hivePort = viewInstanceDefinition.getPropertyMap().get(HIVE_PORT);
-      try {
-        int port = Integer.valueOf(hivePort);
-        if (port < 1 || port > 65535) {
-          return new InvalidPropertyValidationResult(false, "Must be from 1 to 65535");
+      if (hivePort != null) {
+        try {
+          int port = Integer.valueOf(hivePort);
+          if (port < 1 || port > 65535) {
+            return new InvalidPropertyValidationResult(false, "Must be from 1 to 65535");
+          }
+        } catch (NumberFormatException e) {
+          return new InvalidPropertyValidationResult(false, "Must be integer");
         }
-      } catch (NumberFormatException e) {
-        return new InvalidPropertyValidationResult(false, "Must be integer");
       }
     }
 
     if (property.equals(YARN_ATS_URL)) {
       String atsUrl = viewInstanceDefinition.getPropertyMap().get(YARN_ATS_URL);
-      if (validateURL(atsUrl)) return new InvalidPropertyValidationResult(false, "Must be valid URL");
-    }
-
-    if (property.equals(HIVE_AUTH)) {
-      String auth = viewInstanceDefinition.getPropertyMap().get(HIVE_AUTH);
-
-      if (auth != null && !auth.isEmpty()) {
-        for(String param : auth.split(";")) {
-          String[] keyvalue = param.split("=");
-          if (keyvalue.length != 2) {
-            return new InvalidPropertyValidationResult(false, "Can not parse authentication param " + param + " in " + auth);
-          }
-        }
-      }
+      if (!validateURL(atsUrl)) return new InvalidPropertyValidationResult(false, "Must be valid URL");
     }
 
     return ValidationResult.SUCCESS;
   }
 
+  /**
+   * Validates filesystem URL
+   * @param webhdfsUrl url
+   * @return true if the URL is valid
+   */
   public boolean validateURL(String webhdfsUrl) {
-    try {
-      new URI(webhdfsUrl);
-    } catch (URISyntaxException e) {
-      return true;
-    }
-    return false;
+    String[] schemes = {"webhdfs", "hdfs", "s3", "file"};
+    UrlValidator urlValidator = new UrlValidator(schemes);
+    return urlValidator.isValid(webhdfsUrl);
   }
 
   public static class InvalidPropertyValidationResult implements ValidationResult {

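For reference (not part of this commit), a minimal standalone sketch of how commons-validator's UrlValidator behaves with the scheme whitelist used in validateURL() above; the class name and hostnames are illustrative:

  import org.apache.commons.validator.routines.UrlValidator;

  public class UrlValidatorSketch {
    public static void main(String[] args) {
      // Same scheme whitelist as PropertyValidator.validateURL() above.
      String[] schemes = {"webhdfs", "hdfs", "s3", "file"};
      UrlValidator urlValidator = new UrlValidator(schemes);

      System.out.println(urlValidator.isValid("webhdfs://namenode.example.com:50070")); // true
      System.out.println(urlValidator.isValid("ftp://namenode.example.com:50070"));     // false: scheme not in the whitelist
      System.out.println(urlValidator.isValid("webhdfs://bad host:50070"));             // false: invalid authority
    }
  }
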
http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobController.java
index 6bc284e..2f5c76c 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobController.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobController.java
@@ -52,28 +52,32 @@ public class BackgroundJobController {
   }
 
   public Thread.State state(String key) {
-    if (!jobs.containsKey(key))
+    if (!jobs.containsKey(key)) {
       return Thread.State.TERMINATED;
+    }
 
     Thread.State state = jobs.get(key).getState();
 
-    if (state == Thread.State.TERMINATED)
+    if (state == Thread.State.TERMINATED) {
       jobs.remove(key);
+    }
 
     return state;
   }
 
   public boolean interrupt(String key) {
-    if (!jobs.containsKey(key))
+    if (!jobs.containsKey(key)) {
       return false;
+    }
 
     jobs.get(key).interrupt();
     return true;
   }
 
   public boolean isInterrupted(String key) {
-    if (state(key) == Thread.State.TERMINATED)
+    if (state(key) == Thread.State.TERMINATED) {
       return true;
+    }
 
     return jobs.get(key).isInterrupted();
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
index 9ad16eb..c30b269 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
@@ -188,8 +188,9 @@ public class Connection {
   }
 
   private String getAuthParamDefault(String key, String defaultValue) {
-    if (authParams.containsKey(key))
+    if (authParams.containsKey(key)) {
       return authParams.get(key);
+    }
     return defaultValue;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
index 98256eb..5a4406a 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
@@ -21,19 +21,25 @@ package org.apache.ambari.view.hive.client;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive.utils.HiveClientFormattedException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.ambari.AmbariApi;
+import org.apache.ambari.view.utils.ambari.AmbariApiException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 public class ConnectionFactory implements IConnectionFactory {
   private final static Logger LOG =
       LoggerFactory.getLogger(ConnectionFactory.class);
   private ViewContext context;
+  private AmbariApi ambariApi;
 
   public ConnectionFactory(ViewContext context) {
     this.context = context;
+    this.ambariApi = new AmbariApi(context);
   }
 
   @Override
@@ -47,6 +53,21 @@ public class ConnectionFactory implements IConnectionFactory {
   }
 
   private String getHiveHost() {
+    if (ambariApi.isClusterAssociated()) {
+      List<String> hiveServerHosts;
+      try {
+        hiveServerHosts = ambariApi.getHostsWithComponent("HIVE_SERVER");
+      } catch (AmbariApiException e) {
+        throw new ServiceFormattedException(e);
+      }
+
+      if (!hiveServerHosts.isEmpty()) {
+        String hostname = hiveServerHosts.get(0);
+        LOG.info("HIVE_SERVER component was found on host " + hostname);
+        return hostname;
+      }
+      LOG.warn("No host was found with HIVE_SERVER component. Using hive.host property to get hostname.");
+    }
     return context.getProperties().get("hive.host");
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java
index 72a4e8f..e8ce02e 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java
@@ -70,8 +70,9 @@ public class PersonalCRUDResourceManager<T extends PersonalResource> extends CRU
 
   @Override
   protected boolean checkPermissions(T object) {
-    if (ignorePermissions)
+    if (ignorePermissions) {
       return true;
+    }
     return object.getOwner().compareTo(this.context.getUsername()) == 0;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java
index 860b2c6..6718093 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java
@@ -23,6 +23,8 @@ import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.utils.*;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -218,7 +220,7 @@ public class FileService extends BaseService {
    */
   public static void hdfsSmokeTest(ViewContext context) {
     try {
-      HdfsApi api = HdfsApi.connectToHDFSApi(context);
+      HdfsApi api = HdfsUtil.connectToHDFSApi(context);
       api.getStatus();
     } catch (WebApplicationException ex) {
       throw ex;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java
index 48a544d..2aa491e 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java
@@ -61,8 +61,9 @@ public class JobResourceProvider implements ResourceProvider<Job> {
 
   @Override
   public Set<Job> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
-    if (context == null)
-      return new HashSet();
+    if (context == null) {
+      return new HashSet<Job>();
+    }
     return new HashSet<Job>(getResourceManager().readAll(
         new OnlyOwnersFilteringStrategy(this.context.getUsername())));
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
index ae0e828..526f13f 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
@@ -272,8 +272,9 @@ public class JobService extends BaseService {
                              @QueryParam("columns") final String requestedColumns) {
     try {
       final JobController jobController = getResourceManager().readController(jobId);
-      if (!jobController.hasResults())
+      if (!jobController.hasResults()) {
         return ResultsPaginationController.emptyResponse().build();
+      }
 
       return ResultsPaginationController.getInstance(context)
            .request(jobId, searchId, true, fromBeginning, count,
@@ -355,7 +356,6 @@ public class JobService extends BaseService {
       if (remove != null && remove.compareTo("true") == 0) {
         getResourceManager().delete(id);
       }
-//      getResourceManager().delete(Integer.valueOf(queryId));
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java
index 3952491..54f6757 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java
@@ -81,8 +81,9 @@ public class LogParser {
 
   public AppId getLastAppInList() {
     Object[] appIds = appsList.toArray();
-    if (appIds.length == 0)
+    if (appIds.length == 0) {
       return null;
+    }
     return (AppId) appIds[appsList.size()-1];
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
index 8305708..18152ad 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
@@ -61,8 +61,9 @@ public class ResultsPaginationController {
 
     @Override
     public long expirationTime(String key, Cursor value) {
-      if (key.startsWith("$"))
+      if (key.startsWith("$")) {
         return -1;  //never expire
+      }
       return super.expirationTime(key, value);
     }
   }
@@ -85,8 +86,9 @@ public class ResultsPaginationController {
     if (searchId == null)
       searchId = DEFAULT_SEARCH_ID;
     String effectiveKey = key + "?" + searchId;
-    if (!getResultsCache().containsKey(effectiveKey))
+    if (!getResultsCache().containsKey(effectiveKey)) {
       return false;
+    }
     Cursor cursor = getResultsCache().get(effectiveKey);
     getResultsCache().put(effectiveKey, cursor);
     return true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java
index 6e46fee..7482921 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java
@@ -27,6 +27,9 @@ import org.slf4j.LoggerFactory;
 import java.util.LinkedList;
 import java.util.List;
 
+/**
+ * Parser of ATS responses
+ */
 public class ATSParser implements IATSParser {
   protected final static Logger LOG =
       LoggerFactory.getLogger(ATSParser.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
index bd477a7..8f12991 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
@@ -39,7 +39,13 @@ public class ATSRequestsDelegateImpl implements ATSRequestsDelegate {
 
   public ATSRequestsDelegateImpl(ViewContext context, String atsUrl) {
     this.context = context;
-    this.atsUrl = atsUrl;
+    this.atsUrl = addProtocolIfMissing(atsUrl);
+  }
+
+  private String addProtocolIfMissing(String atsUrl) {
+    if (!atsUrl.matches("^[^:]+://.*$"))
+      atsUrl = "http://" + atsUrl;
+    return atsUrl;
   }
 
   @Override
@@ -49,7 +55,9 @@ public class ATSRequestsDelegateImpl implements ATSRequestsDelegate {
 
   @Override
   public String hiveQueryIdOperationIdUrl(String operationId) {
-    return atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=operationid:" + operationId;
+    // ATS parses an operationId that starts with a digit as an integer and then returns no response.
+    // Quoting the value prevents this.
+    return atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=operationid:%22" + operationId + "%22";
   }
 
   @Override

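A quick standalone illustration (not part of this commit) of the protocol-detection regex used by addProtocolIfMissing() above; the addresses are made up:

  public class AddProtocolSketch {
    public static void main(String[] args) {
      // The regex only matches strings that already contain "scheme://".
      System.out.println("ats.example.com:8188".matches("^[^:]+://.*$"));         // false -> "http://" gets prepended
      System.out.println("https://ats.example.com:8188".matches("^[^:]+://.*$")); // true  -> left unchanged
    }
  }
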
http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParser.java
index e1d4c73..b39be44 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParser.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParser.java
@@ -21,25 +21,54 @@ package org.apache.ambari.view.hive.resources.jobs.rm;
 import org.apache.ambari.view.hive.resources.jobs.atsJobs.TezVertexId;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
+/**
+ * Parser of Resource Manager responses
+ */
 public class RMParser {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(RMParser.class);
   private RMRequestsDelegate delegate;
 
   public RMParser(RMRequestsDelegate delegate) {
     this.delegate = delegate;
   }
 
+  /**
+   * Progress of DAG
+   * @param appId App Id
+   * @param dagId DAG Id
+   * @return progress of DAG
+   */
   public Double getDAGProgress(String appId, String dagId) {
     String dagIdx = parseDagIdIndex(dagId);
-    JSONObject dagProgress = (JSONObject) delegate.dagProgress(appId, dagIdx).get("dagProgress");
-    return (Double) (dagProgress.get("progress"));
+    JSONObject progresses = delegate.dagProgress(appId, dagIdx);
+
+    double dagProgressValue;
+    if (progresses != null) {
+      JSONObject dagProgress = (JSONObject) progresses.get("dagProgress");
+      dagProgressValue = (Double) (dagProgress.get("progress"));
+    } else {
+      LOG.error("Error while retrieving progress of " + appId + ":" + dagId + ". 0 assumed.");
+      dagProgressValue = 0;
+    }
+    return dagProgressValue;
   }
 
+  /**
+   * Progress of vertices
+   * @param appId App Id
+   * @param dagId DAG Id
+   * @param vertices vertices list
+   * @return progress of each vertex
+   */
   public List<VertexProgress> getDAGVerticesProgress(String appId, String dagId, List<TezVertexId> vertices) {
     String dagIdx = parseDagIdIndex(dagId);
 
@@ -58,10 +87,22 @@ public class RMParser {
 
     String commaSeparatedVertices = builder.toString();
 
-    JSONArray vertexProgresses = (JSONArray) delegate.verticesProgress(
-        appId, dagIdx, commaSeparatedVertices).get("vertexProgresses");
-
     List<VertexProgress> parsedVertexProgresses = new LinkedList<VertexProgress>();
+    JSONObject vertexProgressesResponse = delegate.verticesProgress(
+        appId, dagIdx, commaSeparatedVertices);
+    if (vertexProgressesResponse == null) {
+      LOG.error("Error while retrieving progress of vertices " +
+          appId + ":" + dagId + ":" + commaSeparatedVertices + ". 0 assumed for all vertices.");
+      for (TezVertexId vertexId : vertices) {
+        VertexProgress vertexProgressInfo = new VertexProgress();
+        vertexProgressInfo.name = vertexId.vertexName;
+        vertexProgressInfo.progress = 0.0;
+        parsedVertexProgresses.add(vertexProgressInfo);
+      }
+      return parsedVertexProgresses;
+    }
+    JSONArray vertexProgresses = (JSONArray) vertexProgressesResponse.get("vertexProgresses");
+
     for (Object vertex : vertexProgresses) {
       JSONObject jsonObject = (JSONObject) vertex;
 

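For context (not part of this commit), a small standalone sketch of parsing a dagProgress-style payload with json-simple, mirroring the structure RMParser reads above; the payload and class name are hypothetical:

  import org.json.simple.JSONObject;
  import org.json.simple.JSONValue;

  public class DagProgressSketch {
    public static void main(String[] args) {
      // Hypothetical "dagProgress" response shaped like the one the parser consumes.
      String payload = "{\"dagProgress\":{\"dagID\":\"dag_1\",\"progress\":0.42}}";

      JSONObject response = (JSONObject) JSONValue.parse(payload);
      JSONObject dagProgress = (JSONObject) response.get("dagProgress");

      // json-simple returns floating point numbers as Double, matching the cast used in getDAGProgress().
      System.out.println((Double) dagProgress.get("progress")); // 0.42
    }
  }
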
http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParserFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParserFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParserFactory.java
index 9733147..596d1d1 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParserFactory.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMParserFactory.java
@@ -19,8 +19,19 @@
 package org.apache.ambari.view.hive.resources.jobs.rm;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.ambari.AmbariApi;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class RMParserFactory {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(RMParserFactory.class);
+
+  public static final String HTTPS_ONLY = "HTTPS_ONLY";
+  public static final String HTTP_ONLY = "HTTP_ONLY";
+  public static final String YARN_SITE = "yarn-site";
+  public static final String YARN_HTTP_POLICY = "yarn.http.policy";
 
   private ViewContext context;
 
@@ -29,11 +40,48 @@ public class RMParserFactory {
   }
 
   public RMParser getRMParser() {
-    RMRequestsDelegate delegate = new RMRequestsDelegateImpl(context, getRMUrl(context));
+    RMRequestsDelegate delegate = new RMRequestsDelegateImpl(context, getRMUrl());
     return new RMParser(delegate);
   }
 
-  public static String getRMUrl(ViewContext context) {
-    return context.getProperties().get("yarn.resourcemanager.url");
+  public String getRMUrl() {
+    String url;
+
+    AmbariApi ambariApi = new AmbariApi(context);
+
+    if (ambariApi.isClusterAssociated()) {
+      String httpPolicy = ambariApi.getCluster().getConfigurationValue("yarn-site", "yarn.http.policy");
+      if (httpPolicy.equals(HTTPS_ONLY)) {
+        url = ambariApi.getCluster().getConfigurationValue("yarn-site", "yarn.resourcemanager.webapp.https.address");
+      } else {
+        url = ambariApi.getCluster().getConfigurationValue("yarn-site", "yarn.resourcemanager.webapp.address");
+        if (!httpPolicy.equals(HTTP_ONLY))
+          LOG.error(String.format("R040 Unknown value %s of yarn-site/yarn.http.policy. HTTP_ONLY assumed.", httpPolicy));
+      }
+
+    } else {
+      url = context.getProperties().get("yarn.resourcemanager.url");
+    }
+    return addProtocolIfMissing(url);
+  }
+
+  public String addProtocolIfMissing(String url) {
+    if (!url.matches("^[^:]+://.*$")) {
+      AmbariApi ambariApi = new AmbariApi(context);
+      if (!ambariApi.isClusterAssociated()) {
+        throw new ServiceFormattedException(
+            "R030 View is not cluster associated. Resource Manager URL should contain protocol.");
+      }
+
+      String httpPolicy = ambariApi.getCluster().getConfigurationValue(YARN_SITE, YARN_HTTP_POLICY);
+      if (httpPolicy.equals(HTTPS_ONLY)) {
+        url = "https://" + url;
+      } else {
+        url = "http://" + url;
+        if (!httpPolicy.equals(HTTP_ONLY))
+          LOG.error(String.format("R050 Unknown value %s of yarn-site/yarn.http.policy. HTTP_ONLY assumed.", httpPolicy));
+      }
+    }
+    return url;
   }
 }

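A simplified, standalone restatement (not part of this commit) of the yarn.http.policy decision implemented above; the addresses and class name are illustrative:

  public class RmUrlSketch {
    static String addProtocol(String address, String httpPolicy) {
      if (address.matches("^[^:]+://.*$")) {
        return address; // already carries a protocol
      }
      // HTTPS_ONLY selects https; anything else falls back to http, as in getRMUrl() above.
      return ("HTTPS_ONLY".equals(httpPolicy) ? "https://" : "http://") + address;
    }

    public static void main(String[] args) {
      System.out.println(addProtocol("rm.example.com:8088", "HTTP_ONLY"));  // http://rm.example.com:8088
      System.out.println(addProtocol("rm.example.com:8090", "HTTPS_ONLY")); // https://rm.example.com:8090
    }
  }
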
http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMRequestsDelegateImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMRequestsDelegateImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMRequestsDelegateImpl.java
index 43186f4..087ef68 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMRequestsDelegateImpl.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/rm/RMRequestsDelegateImpl.java
@@ -20,6 +20,7 @@ package org.apache.ambari.view.hive.resources.jobs.rm;
 
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.ambari.AmbariApi;
 import org.apache.commons.io.IOUtils;
 import org.json.simple.JSONObject;
 import org.json.simple.JSONValue;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
index eb2e141..32d64ec 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
@@ -26,8 +26,9 @@ import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceManager;
 import org.apache.ambari.view.hive.utils.*;
-import org.apache.ambari.view.hive.utils.HdfsApi;
-import org.apache.ambari.view.hive.utils.HdfsUtil;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.apache.hive.service.cli.thrift.TSessionHandle;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -110,8 +111,9 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
 
   private TSessionHandle getSession() {
     try {
-      if (job.getSessionTag() != null)
+      if (job.getSessionTag() != null) {
         return hiveConnection.getSessionByTag(getJob().getSessionTag());
+      }
     } catch (HiveClientException ignore) {
       LOG.debug("Stale sessionTag was provided, new session will be opened");
     }
@@ -182,6 +184,8 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
       LOG.error("Error while fetching logs: " + ex.getMessage());
     } catch (ItemNotFound itemNotFound) {
       LOG.debug("No TOperationHandle for job#" + job.getId() + ", can't read logs");
+    } catch (HdfsApiException e) {
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -270,7 +274,11 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
     LOG.debug("Creating log file for job#" + job.getId());
 
     String logFile = job.getStatusDir() + "/" + "logs";
-    HdfsUtil.putStringToFile(hdfsApi, logFile, "");
+    try {
+      HdfsUtil.putStringToFile(hdfsApi, logFile, "");
+    } catch (HdfsApiException e) {
+      throw new ServiceFormattedException(e);
+    }
 
     job.setLogFile(logFile);
     LOG.debug("Log file for job#" + job.getId() + ": " + logFile);
@@ -278,7 +286,12 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
 
   private void setupStatusDir() {
     String newDirPrefix = makeStatusDirectoryPrefix();
-    String newDir = HdfsUtil.findUnallocatedFileName(hdfsApi, newDirPrefix, "");
+    String newDir = null;
+    try {
+      newDir = HdfsUtil.findUnallocatedFileName(hdfsApi, newDirPrefix, "");
+    } catch (HdfsApiException e) {
+      throw new ServiceFormattedException(e);
+    }
 
     job.setStatusDir(newDir);
     LOG.debug("Status dir for job#" + job.getId() + ": " + newDir);
@@ -329,6 +342,8 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
       throw new ServiceFormattedException("F040 Error when creating file " + jobQueryFilePath, e);
     } catch (InterruptedException e) {
       throw new ServiceFormattedException("F040 Error when creating file " + jobQueryFilePath, e);
+    } catch (HdfsApiException e) {
+      throw new ServiceFormattedException(e);
     }
     job.setQueryFile(jobQueryFilePath);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java
index f497f76..a77d10b 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java
@@ -60,8 +60,9 @@ public class FileResourceResourceProvider implements ResourceProvider<FileResour
 
   @Override
   public Set<FileResourceItem> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
-    if (context == null)
-      return new HashSet();
+    if (context == null) {
+      return new HashSet<FileResourceItem>();
+    }
     return new HashSet<FileResourceItem>(getResourceManager().readAll(
         new OnlyOwnersFilteringStrategy(this.context.getUsername())));
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java
index 01f9c9c..76a9888 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java
@@ -23,7 +23,8 @@ import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
 import org.apache.ambari.view.hive.utils.*;
-import org.apache.ambari.view.hive.utils.HdfsUtil;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -78,8 +79,13 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
     String baseFileName = String.format(userScriptsPath +
         "/%s-%s", normalizedName, timestamp);
 
-    String newFilePath = HdfsUtil.findUnallocatedFileName(sharedObjectsFactory.getHdfsApi(), baseFileName, ".hql");
-    HdfsUtil.putStringToFile(sharedObjectsFactory.getHdfsApi(), newFilePath, "");
+    String newFilePath = null;
+    try {
+      newFilePath = HdfsUtil.findUnallocatedFileName(sharedObjectsFactory.getHdfsApi(), baseFileName, ".hql");
+      HdfsUtil.putStringToFile(sharedObjectsFactory.getHdfsApi(), newFilePath, "");
+    } catch (HdfsApiException e) {
+      throw new ServiceFormattedException(e);
+    }
 
     object.setQueryFile(newFilePath);
     storageFactory.getStorage().store(SavedQuery.class, object);
@@ -105,11 +111,24 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
         LOG.error("Can't read query file " + savedQuery.getQueryFile());
         return;
       }
-      savedQuery.setShortQuery(query.substring(0, (query.length() > 42)?42:query.length()));
+      savedQuery.setShortQuery(makeShortQuery(query));
     }
     storageFactory.getStorage().store(SavedQuery.class, savedQuery);
   }
 
+  /**
+   * Generate a short preview of the query.
+   * Remove SET statements like "set hive.execution.engine=tez;" from the beginning
+   * and trim to 42 characters.
+   * @param query full query
+   * @return shortened query
+   */
+  protected static String makeShortQuery(String query) {
+    query = query.replaceAll("(?i)set\\s+[\\w\\-.]+(\\s*)=(\\s*)[\\w\\-.]+(\\s*);", "");
+    query = query.trim();
+    return query.substring(0, (query.length() > 42)?42:query.length());
+  }
+
   @Override
   public SavedQuery update(SavedQuery newObject, String id) throws ItemNotFound {
     SavedQuery savedQuery = super.update(newObject, id);

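For reference (not part of this commit), a standalone sketch of the makeShortQuery() behaviour using the same regex as above; the query text and class name are made up:

  public class ShortQuerySketch {
    public static void main(String[] args) {
      String query = "set hive.execution.engine=tez;\nSELECT * FROM logs;";

      // Strip leading SET statements, trim, then cut to at most 42 characters (same steps as makeShortQuery).
      query = query.replaceAll("(?i)set\\s+[\\w\\-.]+(\\s*)=(\\s*)[\\w\\-.]+(\\s*);", "").trim();
      String shortQuery = query.substring(0, Math.min(42, query.length()));

      System.out.println(shortQuery); // SELECT * FROM logs;
    }
  }
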
http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java
index ac4f798..5a8c2fd 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java
@@ -63,8 +63,9 @@ public class SavedQueryResourceProvider implements ResourceProvider<SavedQuery>
 
   @Override
   public Set<SavedQuery> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
-    if (context == null)
-      return new HashSet();
+    if (context == null) {
+      return new HashSet<SavedQuery>();
+    }
     return new HashSet<SavedQuery>(getResourceManager().readAll(
         new OnlyOwnersFilteringStrategy(this.context.getUsername())));
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java
index f55c1fd..9ea19c6 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java
@@ -22,10 +22,10 @@ import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.utils.HdfsApi;
-import org.apache.ambari.view.hive.utils.HdfsUtil;
 import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.json.simple.JSONObject;
 import org.json.simple.JSONValue;
 import org.slf4j.Logger;
@@ -211,7 +211,7 @@ public class SavedQueryService extends BaseService {
       HdfsApi hdfsApi = getSharedObjectsFactory().getHdfsApi();
 
       HdfsUtil.putStringToFile(hdfsApi, defaultsFile,
-                               settings.toString());
+          settings.toString());
       String defaults = HdfsUtil.readFile(hdfsApi, defaultsFile);
       return Response.ok(JSONValue.parse(defaults)).build();
     } catch (WebApplicationException ex) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java
index 3670ca1..ba83b03 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java
@@ -61,8 +61,9 @@ public class UDFResourceProvider implements ResourceProvider<UDF> {
 
   @Override
   public Set<UDF> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
-    if (context == null)
-      return new HashSet();
+    if (context == null) {
+      return new HashSet<UDF>();
+    }
     return new HashSet<UDF>(getResourceManager().readAll(
         new OnlyOwnersFilteringStrategy(this.context.getUsername())));
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java
index 6282fc9..eb1a401 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java
@@ -18,7 +18,7 @@
 
 package org.apache.ambari.view.hive.utils;
 
-import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.hadoop.fs.FSDataInputStream;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java
deleted file mode 100644
index cbc4d4b..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java
+++ /dev/null
@@ -1,380 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.utils;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.json.simple.JSONArray;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.URI;
-import java.security.PrivilegedExceptionAction;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-/**
- * HDFS Business Delegate
- */
-public class HdfsApi {
-  private Configuration conf = new Configuration();
-
-  private FileSystem fs;
-  private Map<String, String> params;
-
-  private UserGroupInformation ugi;
-  private final static Logger LOG =
-      LoggerFactory.getLogger(HdfsApi.class);
-
-  /**
-   * Constructor
-   * @param defaultFs hdfs uri
-   * @param username user.name
-   * @throws java.io.IOException
-   * @throws InterruptedException
-   */
-  public HdfsApi(final String defaultFs, String username, Map<String, String> params) throws IOException,
-      InterruptedException {
-    this.params = params;
-    Thread.currentThread().setContextClassLoader(null);
-    conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
-    conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
-    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
-
-    ugi = UserGroupInformation.createProxyUser(username, getProxyUser());
-
-    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
-      public FileSystem run() throws IOException {
-        return FileSystem.get(URI.create(defaultFs), conf);
-      }
-    });
-  }
-
-  private UserGroupInformation getProxyUser() throws IOException {
-    UserGroupInformation proxyuser;
-    if (params.containsKey("proxyuser")) {
-      proxyuser = UserGroupInformation.createRemoteUser(params.get("proxyuser"));
-    } else {
-      proxyuser = UserGroupInformation.getCurrentUser();
-    }
-
-    proxyuser.setAuthenticationMethod(getAuthenticationMethod());
-    return proxyuser;
-  }
-
-  private UserGroupInformation.AuthenticationMethod getAuthenticationMethod() {
-    UserGroupInformation.AuthenticationMethod authMethod;
-    if (params.containsKey("auth")) {
-      authMethod = UserGroupInformation.AuthenticationMethod.valueOf(params.get("auth"));
-    } else {
-      authMethod = UserGroupInformation.AuthenticationMethod.SIMPLE;
-    }
-    return authMethod;
-  }
-
-  /**
-   * List dir operation
-   * @param path path
-   * @return array of FileStatus objects
-   * @throws java.io.FileNotFoundException
-   * @throws java.io.IOException
-   * @throws InterruptedException
-   */
-  public synchronized FileStatus[] listdir(final String path) throws FileNotFoundException,
-      IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
-      public FileStatus[] run() throws FileNotFoundException, Exception {
-        return fs.listStatus(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Get file status
-   * @param path path
-   * @return file status
-   * @throws java.io.IOException
-   * @throws java.io.FileNotFoundException
-   * @throws InterruptedException
-   */
-  public synchronized FileStatus getFileStatus(final String path) throws IOException,
-      FileNotFoundException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
-      public FileStatus run() throws FileNotFoundException, IOException {
-        return fs.getFileStatus(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Make directory
-   * @param path path
-   * @return success
-   * @throws java.io.IOException
-   * @throws InterruptedException
-   */
-  public synchronized boolean mkdir(final String path) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.mkdirs(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Rename
-   * @param src source path
-   * @param dst destination path
-   * @return success
-   * @throws java.io.IOException
-   * @throws InterruptedException
-   */
-  public synchronized boolean rename(final String src, final String dst) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.rename(new Path(src), new Path(dst));
-      }
-    });
-  }
-
-  /**
-   * Delete
-   * @param path path
-   * @param recursive delete recursive
-   * @return success
-   * @throws java.io.IOException
-   * @throws InterruptedException
-   */
-  public synchronized boolean delete(final String path, final boolean recursive)
-      throws IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.delete(new Path(path), recursive);
-      }
-    });
-  }
-
-  /**
-   * Home directory
-   * @return home directory
-   * @throws Exception
-   */
-  public synchronized Path getHomeDir() throws Exception {
-    return ugi.doAs(new PrivilegedExceptionAction<Path>() {
-      public Path run() throws IOException {
-        return fs.getHomeDirectory();
-      }
-    });
-  }
-
-  /**
-   * Hdfs Status
-   * @return home directory
-   * @throws Exception
-   */
-  public synchronized FsStatus getStatus() throws Exception {
-    return ugi.doAs(new PrivilegedExceptionAction<FsStatus>() {
-      public FsStatus run() throws IOException {
-        return fs.getStatus();
-      }
-    });
-  }
-
-  /**
-   * Create file
-   * @param path path
-   * @param overwrite overwrite existent file
-   * @return output stream
-   * @throws java.io.IOException
-   * @throws InterruptedException
-   */
-  public synchronized FSDataOutputStream create(final String path, final boolean overwrite)
-      throws IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
-      public FSDataOutputStream run() throws Exception {
-        return fs.create(new Path(path), overwrite);
-      }
-    });
-  }
-
-  /**
-   * Open file
-   * @param path path
-   * @return input stream
-   * @throws java.io.IOException
-   * @throws InterruptedException
-   */
-  public synchronized FSDataInputStream open(final String path) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
-      public FSDataInputStream run() throws Exception {
-        return fs.open(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Copy file
-   * @param src source path
-   * @param dest destination path
-   * @return success
-   * @throws java.io.IOException
-   * @throws InterruptedException
-   */
-  public synchronized void copy(final String src, final String dest) throws IOException,
-      InterruptedException {
-    boolean result = ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return FileUtil.copy(fs, new Path(src), fs, new Path(dest), false, conf);
-      }
-    });
-    if (!result) {
-      throw new ServiceFormattedException("F050 Can't copy source file from " + src + " to " + dest);
-    }
-  }
-
-  public synchronized boolean exists(final String newFilePath) throws IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.exists(new Path(newFilePath));
-      }
-    });
-  }
-
-  /**
-   * Converts a Hadoop permission into a Unix permission symbolic representation
-   * (i.e. -rwxr--r--) or default if the permission is NULL.
-   *
-   * @param p
-   *          Hadoop permission.
-   * @return the Unix permission symbolic representation or default if the
-   *         permission is NULL.
-   */
-  private static String permissionToString(FsPermission p) {
-    return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
-        + p.getGroupAction().SYMBOL + p.getOtherAction().SYMBOL;
-  }
-
-  /**
-   * Converts a Hadoop <code>FileStatus</code> object into a JSON array object.
-   * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
-   * specified URL.
-   * <p/>
-   *
-   * @param status
-   *          Hadoop file status.
-   * @return The JSON representation of the file status.
-   */
-
-  public static Map<String, Object> fileStatusToJSON(FileStatus status) {
-    Map<String, Object> json = new LinkedHashMap<String, Object>();
-    json.put("path", status.getPath().toString());
-    json.put("isDirectory", status.isDirectory());
-    json.put("len", status.getLen());
-    json.put("owner", status.getOwner());
-    json.put("group", status.getGroup());
-    json.put("permission", permissionToString(status.getPermission()));
-    json.put("accessTime", status.getAccessTime());
-    json.put("modificationTime", status.getModificationTime());
-    json.put("blockSize", status.getBlockSize());
-    json.put("replication", status.getReplication());
-    return json;
-  }
-
-  /**
-   * Converts a Hadoop <code>FileStatus</code> array into a JSON array object.
-   * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
-   * specified URL.
-   * <p/>
-   *
-   * @param status
-   *          Hadoop file status array.
-   * @return The JSON representation of the file status array.
-   */
-  @SuppressWarnings("unchecked")
-  public static JSONArray fileStatusToJSON(FileStatus[] status) {
-    JSONArray json = new JSONArray();
-    if (status != null) {
-      for (FileStatus s : status) {
-        json.add(fileStatusToJSON(s));
-      }
-    }
-    return json;
-  }
-
-  public static synchronized HdfsApi connectToHDFSApi(ViewContext context) {
-    HdfsApi api = null;
-    Thread.currentThread().setContextClassLoader(null);
-
-    String defaultFS = context.getProperties().get("webhdfs.url");
-    if (defaultFS == null) {
-      String message = "webhdfs.url is not configured!";
-      LOG.error(message);
-      throw new MisconfigurationFormattedException("webhdfs.url");
-    }
-
-    try {
-      api = new HdfsApi(defaultFS, getHdfsUsername(context), getHdfsAuthParams(context));
-      LOG.info("HdfsApi connected OK");
-    } catch (IOException e) {
-      String message = "F060 Couldn't open connection to HDFS";
-      LOG.error(message);
-      throw new ServiceFormattedException(message, e);
-    } catch (InterruptedException e) {
-      String message = "F060 Couldn't open connection to HDFS";
-      LOG.error(message);
-      throw new ServiceFormattedException(message, e);
-    }
-    return api;
-  }
-
-  private static Map<String, String> getHdfsAuthParams(ViewContext context) {
-    String auth = context.getProperties().get("webhdfs.auth");
-    Map<String, String> params = new HashMap<String, String>();
-    if (auth == null || auth.isEmpty()) {
-      auth = "auth=SIMPLE";
-    }
-    for(String param : auth.split(";")) {
-      String[] keyvalue = param.split("=");
-      if (keyvalue.length != 2) {
-        LOG.error("Can not parse authentication param " + param + " in " + auth);
-        continue;
-      }
-      params.put(keyvalue[0], keyvalue[1]);
-    }
-    return params;
-  }
-
-  public static String getHdfsUsername(ViewContext context) {
-    String userName = context.getProperties().get("webhdfs.username");
-    if (userName == null || userName.compareTo("null") == 0 || userName.compareTo("") == 0)
-      userName = context.getUsername();
-    return userName;
-  }
-}
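
The class removed above wrapped every FileSystem call in a UserGroupInformation.doAs block so that HDFS operations ran as the signed-in view user. For reference, a minimal standalone sketch of that proxy-user pattern against hadoop-common; the fs.defaultFS value and the user name below are placeholders, not values taken from this patch:

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.UserGroupInformation;

    public class ProxyUserHdfsSketch {
      public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        // Placeholder endpoint; the view reads its own value from the webhdfs.url property.
        conf.set("fs.defaultFS", "webhdfs://namenode.example.com:50070");

        // Act on behalf of the end user, on top of the process's login identity.
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(
            "end_user", UserGroupInformation.getLoginUser());

        Path home = ugi.doAs(new PrivilegedExceptionAction<Path>() {
          public Path run() throws IOException {
            return FileSystem.get(conf).getHomeDirectory();
          }
        });
        System.out.println("home = " + home);
      }
    }

The equivalent plumbing now comes from the shared org.apache.ambari.view.utils.hdfs classes that SharedObjectsFactory switches to further down in this patch.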

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java
deleted file mode 100644
index aeeb1b7..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.utils;
-
-
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-
-public class HdfsUtil {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(HdfsUtil.class);
-
-  /**
-   * Write string to file with overwriting
-   * @param filePath path to file
-   * @param content new content of file
-   */
-  public static void putStringToFile(HdfsApi hdfs, String filePath, String content) {
-    FSDataOutputStream stream;
-    try {
-      synchronized (hdfs) {
-        stream = hdfs.create(filePath, true);
-        stream.writeBytes(content);
-        stream.close();
-      }
-    } catch (IOException e) {
-      throw new ServiceFormattedException("F070 Could not write file " + filePath, e);
-    } catch (InterruptedException e) {
-      throw new ServiceFormattedException("F071 Could not write file " + filePath, e);
-    }
-  }
-
-
-  /**
-   * Read string from file
-   * @param filePath path to file
-   */
-  public static String readFile(HdfsApi hdfs, String filePath) {
-    FSDataInputStream stream;
-    try {
-      stream = hdfs.open(filePath);
-      return IOUtils.toString(stream);
-    } catch (IOException e) {
-      throw new ServiceFormattedException("F080 Could not read file " + filePath, e);
-    } catch (InterruptedException e) {
-      throw new ServiceFormattedException("F081 Could not read file " + filePath, e);
-    }
-  }
-
-
-  /**
-   * Increment index appended to filename until find first unallocated file
-   * @param fullPathAndFilename path to file and prefix for filename
-   * @param extension file extension
-   * @return if fullPathAndFilename="/tmp/file",extension=".txt" then filename will be like "/tmp/file_42.txt"
-   */
-  public static String findUnallocatedFileName(HdfsApi hdfs, String fullPathAndFilename, String extension) {
-    int triesCount = 0;
-    String newFilePath;
-    boolean isUnallocatedFilenameFound;
-
-    try {
-      do {
-        newFilePath = String.format(fullPathAndFilename + "%s" + extension, (triesCount == 0) ? "" : "_" + triesCount);
-        LOG.debug("Trying to find free filename " + newFilePath);
-
-        isUnallocatedFilenameFound = !hdfs.exists(newFilePath);
-        if (isUnallocatedFilenameFound) {
-          LOG.debug("File created successfully!");
-        }
-
-        triesCount += 1;
-      } while (!isUnallocatedFilenameFound);
-    } catch (IOException e) {
-      throw new ServiceFormattedException("F080 Error in creation " + fullPathAndFilename + "...", e);
-    } catch (InterruptedException e) {
-      throw new ServiceFormattedException("F080 Error in creation " + fullPathAndFilename + "...", e);
-    }
-
-    return newFilePath;
-  }
-}
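
The removed HdfsUtil also carried a small helper that appends an incrementing suffix to a filename until it finds a path that does not exist yet. A self-contained sketch of that loop, written against java.nio.file purely for illustration (the real helper asked HDFS through HdfsApi.exists):

    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class UnallocatedNameSketch {
      static String findUnallocatedFileName(String prefix, String extension) {
        int tries = 0;
        String candidate;
        do {
          // "/tmp/file.csv", then "/tmp/file_1.csv", "/tmp/file_2.csv", ...
          candidate = String.format("%s%s%s", prefix, (tries == 0) ? "" : "_" + tries, extension);
          tries++;
        } while (Files.exists(Paths.get(candidate)));
        return candidate;
      }

      public static void main(String[] args) {
        System.out.println(findUnallocatedFileName("/tmp/file", ".csv"));
      }
    }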

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/ServiceFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/ServiceFormattedException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/ServiceFormattedException.java
index e9698c3..eaea2ff 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/ServiceFormattedException.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/ServiceFormattedException.java
@@ -39,6 +39,10 @@ public class ServiceFormattedException extends WebApplicationException {
     super(errorEntity(message, null, suggestStatus(null), null));
   }
 
+  public ServiceFormattedException(Throwable exception) {
+    super(errorEntity(null, exception, suggestStatus(exception), null));
+  }
+
   public ServiceFormattedException(String message, Throwable exception) {
     super(errorEntity(message, exception, suggestStatus(exception), null));
   }
@@ -53,7 +57,9 @@ public class ServiceFormattedException extends WebApplicationException {
 
   private static int suggestStatus(Throwable exception) {
     int status = 500;
-    if (exception == null) return status;
+    if (exception == null) {
+      return status;
+    }
     if (exception instanceof AccessControlException) {
       status = 403;
     }
@@ -73,7 +79,14 @@ public class ServiceFormattedException extends WebApplicationException {
       trace += sw.toString();
 
       if (message == null) {
-        response.put("message", "E090 " + e.getClass().getSimpleName());
+        String innerMessage = e.getMessage();
+        String autoMessage;
+
+        if (innerMessage != null)
+          autoMessage = String.format("E090 %s [%s]", innerMessage, e.getClass().getSimpleName());
+        else
+          autoMessage = "E090 " + e.getClass().getSimpleName();
+        response.put("message", autoMessage);
       }
     }
     response.put("trace", trace);
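
The added constructor and the auto-generated message are easiest to see in isolation. A tiny sketch of the same message-building logic with a couple of sample exceptions (class and method names here are illustrative, not part of the patch):

    public class ErrorMessageSketch {
      static String autoMessage(Throwable e) {
        String inner = e.getMessage();
        return (inner != null)
            ? String.format("E090 %s [%s]", inner, e.getClass().getSimpleName())
            : "E090 " + e.getClass().getSimpleName();
      }

      public static void main(String[] args) {
        System.out.println(autoMessage(new IllegalStateException("no active Hive session")));
        // -> E090 no active Hive session [IllegalStateException]
        System.out.println(autoMessage(new NullPointerException()));
        // -> E090 NullPointerException
      }
    }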

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
index 05ff7b6..df67985 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
@@ -34,6 +34,11 @@ import org.apache.ambari.view.hive.resources.jobs.rm.RMParserFactory;
 import org.apache.ambari.view.hive.resources.jobs.viewJobs.IJobControllerFactory;
 import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobControllerFactory;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceManager;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -44,6 +49,9 @@ import java.util.Map;
  * will use different connection.
  */
 public class SharedObjectsFactory implements IStorageFactory, IConnectionFactory {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(SharedObjectsFactory.class);
+
   private ViewContext context;
   private final IConnectionFactory hiveConnectionFactory;
   private final IStorageFactory storageFactory;
@@ -137,8 +145,15 @@ public class SharedObjectsFactory implements IStorageFactory, IConnectionFactory
 
   // =============================
   public HdfsApi getHdfsApi() {
-    if (!localObjects.get(HdfsApi.class).containsKey(getTagName()))
-      localObjects.get(HdfsApi.class).put(getTagName(), HdfsApi.connectToHDFSApi(context));
+    if (!localObjects.get(HdfsApi.class).containsKey(getTagName())) {
+      try {
+        localObjects.get(HdfsApi.class).put(getTagName(), HdfsUtil.connectToHDFSApi(context));
+      } catch (HdfsApiException e) {
+        String message = "F060 Couldn't open connection to HDFS";
+        LOG.error(message);
+        throw new ServiceFormattedException(message, e);
+      }
+    }
     return (HdfsApi) localObjects.get(HdfsApi.class).get(getTagName());
   }
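
getHdfsApi follows the factory's usual pattern: keep one shared object per view-instance tag and create it lazily on first use, now going through HdfsUtil.connectToHDFSApi with HdfsApiException wrapped into a ServiceFormattedException. A generic sketch of that per-tag lazy cache; the types and demo values are illustrative, not the view's real ones:

    import java.util.HashMap;
    import java.util.Map;

    public class LazyCacheSketch<T> {
      public interface Factory<T> { T create(String tag); }

      private final Map<String, T> cache = new HashMap<String, T>();
      private final Factory<T> factory;

      public LazyCacheSketch(Factory<T> factory) { this.factory = factory; }

      public synchronized T get(String tag) {
        if (!cache.containsKey(tag)) {
          cache.put(tag, factory.create(tag));  // connect once per tag, reuse afterwards
        }
        return cache.get(tag);
      }

      public static void main(String[] args) {
        LazyCacheSketch<String> connections = new LazyCacheSketch<String>(new Factory<String>() {
          public String create(String tag) { return "connection-for-" + tag; }
        });
        System.out.println(connections.get("instance-A"));  // created
        System.out.println(connections.get("instance-A"));  // reused
      }
    }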
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/application.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/application.js
index e913c77..5e7a05b 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/application.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/application.js
@@ -22,7 +22,8 @@ import constants from 'hive/utils/constants';
 export default DS.RESTAdapter.extend({
   headers: {
     'X-Requested-By': 'ambari',
-    'Content-Type': 'application/json'
+    'Content-Type': 'application/json',
+    // 'Authorization': 'Basic YWRtaW46YWRtaW4='
   },
 
   buildURL: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/components/column-filter-widget.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/column-filter-widget.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/column-filter-widget.js
index 0b5a9ba..461dabe 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/column-filter-widget.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/column-filter-widget.js
@@ -43,7 +43,7 @@ export default Ember.Component.extend({
     },
 
     sendFilter: function (params) {
-      if (params.from && params.to) {
+      if (params && (params.from || params.from === 0) && (params.to || params.to === 0)) {
         this.set('filterValue', Ember.Object.create({
           min: params.from,
           max: params.to

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/components/progress-widget.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/progress-widget.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/progress-widget.js
index d7c3fda..9a459c3 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/progress-widget.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/progress-widget.js
@@ -22,7 +22,7 @@ export default Ember.Component.extend({
   tagName: 'progress-bar',
 
   updateValue: function () {
-    var progress = this.get('value') || 0;
+    var progress = this.get('value') ? this.get('value').toFixed() : 0;
 
     this.set('style', 'width: %@%'.fmt(progress));
     this.set('percentage', '%@%'.fmt(progress));

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js
index 30e9b9b..c19b131 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js
@@ -136,11 +136,22 @@ export default Ember.ArrayController.extend(FilterableMixin, {
     },
 
     interruptJob: function (job) {
-      var self = this,
-          id = job.get('id');
-
-      job.destroyRecord().then(function () {
-        self.store.find(constants.namingConventions.job, id);
+      var self = this;
+      var id = job.get('id');
+      var url = this.container.lookup('adapter:application').buildURL();
+      url +=  "/jobs/" + id;
+
+      job.set('isCancelling', true);
+
+      Ember.$.ajax({
+         url: url,
+         type: 'DELETE',
+         headers: {
+          'X-Requested-By': 'ambari',
+         },
+         success: function () {
+           job.reload();
+         }
       });
     },
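
Behind the reworked interruptJob action there is nothing more than a DELETE against the job resource, sent with Ambari's X-Requested-By header; the controller then reloads the record instead of destroying it locally. A rough sketch of the same call from plain Java (base URL, job id and authentication are placeholders or omitted, not taken from this patch):

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class CancelJobSketch {
      static int cancelJob(String instanceUrl, String jobId) throws Exception {
        URL url = new URL(instanceUrl + "/jobs/" + jobId);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("DELETE");                      // ask the view to stop the job
        conn.setRequestProperty("X-Requested-By", "ambari");  // required by the Ambari API
        int status = conn.getResponseCode();
        conn.disconnect();
        return status;
      }

      public static void main(String[] args) throws Exception {
        // Placeholder instance URL and job id.
        System.out.println(cancelJob(
            "http://ambari.example.com:8080/api/v1/views/HIVE/versions/0.4.0/instances/Hive", "42"));
      }
    }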
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
index 0e4ac32..a31e984 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
@@ -129,7 +129,9 @@ export default Ember.Controller.extend({
       referrer: referrer
     });
 
-    originalModel.set('isRunning', true);
+    if (!shouldGetVisualExplain) {
+      originalModel.set('isRunning', true);
+    }
 
      //if it's a saved query / history entry set the queryId
     if (!originalModel.get('isNew')) {
@@ -178,14 +180,11 @@ export default Ember.Controller.extend({
     job.save().then(function () {
       self.get('results').getResultsJson(job).then(function (json) {
         defer.resolve(json);
-        originalModel.set('isRunning', undefined);
       }, function (err) {
         defer.reject(err);
-        originalModel.set('isRunning', undefined);
       });
     }, function (err) {
       defer.reject(err);
-        originalModel.set('isRunning', undefined);
     });
 
     return defer.promise;
@@ -417,7 +416,7 @@ export default Ember.Controller.extend({
 
     var self = this;
 
-    var file = "/tmp/" + path + ".csv";
+    var file = path + ".csv";
     var url = this.container.lookup('adapter:application').buildURL();
     url +=  "/jobs/" + job.get('id') + "/results/csv/saveToHDFS";
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js
index c75fffe..3c710d3 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js
@@ -42,10 +42,12 @@ export default Ember.ObjectController.extend({
         };
 
     job.reload().then(function () {
-      if (utils.insensitiveCompare(job.get('status'), constants.statuses.error)) {
+      if (utils.insensitiveCompare(job.get('status'), constants.statuses.error) ||
+          utils.insensitiveCompare(job.get('status'), constants.statuses.failed)) {
         handleError(job.get('statusMessage'));
-      } else {
-        self.get('files').reload(job.get('logFile')).then(function (file) {
+      }
+
+      self.get('files').reload(job.get('logFile')).then(function (file) {
         var fileContent = file.get('fileContent');
 
         if (fileContent) {
@@ -53,10 +55,9 @@ export default Ember.ObjectController.extend({
         }
 
         defer.resolve();
-        },function (err) {
-          handleError(err);
-        });
-      }
+      },function (err) {
+        handleError(err);
+      });
     }, function (err) {
       handleError(err);
     });

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js
index cad96fa..5c7ded2 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js
@@ -91,7 +91,7 @@ export default Ember.ObjectController.extend({
           //backend issue, this will be split in done and fail callbacks once its fixed.
           if (data.status === 404) {
             existingJob.set('results', []);
-            self.send('getNextPage', true, job);
+            self.set('error', data.responseJSON.message);
           } else if (data.status === 200) {
             self.keepAlive(job);
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job-progress.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job-progress.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job-progress.js
index 737506e..c80ae81 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job-progress.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job-progress.js
@@ -63,7 +63,7 @@ export default Ember.Controller.extend({
 
           self.set('totalProgress', total);
 
-          if (job.get('isRunning')) {
+          if (job.get('isRunning') && total < 100) {
             reloadProgress();
           }
 
@@ -77,14 +77,14 @@ export default Ember.Controller.extend({
     this.set('stages', []);
     this.set('totalProgress', 0);
 
-    if (!job.get('applicationId')) {
+    if (!job.get('dagId')) {
       return;
     }
 
     url += '/' + constants.namingConventions.jobs + '/' + job.get('id') + '/progress';
 
     reloadProgress();
-  }.observes('index.model', 'index.model.applicationId'),
+  }.observes('index.model', 'index.model.dagId'),
 
   displayProgress: function () {
     return this.get('index.model.constructor.typeKey') === constants.namingConventions.job;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
index 632e5a0..09d57b5 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
@@ -183,6 +183,7 @@ TRANSLATIONS = {
     history: 'History',
     ok: 'OK',
     stopJob: 'Stop execution',
+    stoppingJob: 'Stopping...',
     close: 'Close',
     clearFilters: 'Clear filters',
     expand: 'Expand message',

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/mixins/filterable.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/mixins/filterable.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/mixins/filterable.js
index 5758bf9..aa1f4cd 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/mixins/filterable.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/mixins/filterable.js
@@ -59,6 +59,10 @@ export default Ember.Mixin.create({
 
   updateFilters: function (property, filterValue, exactMatch) {
     var addFilter = function () {
+      if (!filterValue) {
+        return;
+      }
+
       this.get('filters').pushObject(Ember.Object.create({
         property: property,
         exactMatch: exactMatch,

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/history.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/history.js
index 0028421..5363f7e 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/history.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/history.js
@@ -21,10 +21,18 @@ import constants from 'hive/utils/constants';
 
 export default Ember.Route.extend({
   model: function () {
-    return this.store.find(constants.namingConventions.job);
+    var self = this;
+
+    return this.store.find(constants.namingConventions.job).catch(function (err) {
+      self.notify.error(err.responseJSON.message, err.responseJSON.trace);
+    });
   },
 
   setupController: function (controller, model) {
+    if (!model) {
+      return;
+    }
+
     var filteredModel = model.filter(function (job) {
        //filter out jobs with referrer type of sample, explain and visual explain
        return !job.get('referrer') ||

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/queries.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/queries.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/queries.js
index a10bbd7..6ed55b5 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/queries.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/queries.js
@@ -21,10 +21,18 @@ import constants from 'hive/utils/constants';
 
 export default Ember.Route.extend({
   model: function () {
-    return this.store.find(constants.namingConventions.savedQuery);
+    return this.store.find(constants.namingConventions.savedQuery).catch(function (err) {
+      self.notify.error(err.responseJSON.message, err.responseJSON.trace);
+    });
   },
 
   setupController: function (controller, model) {
+    if (!model) {
+      return;
+    }
+
     controller.set('queries', model);
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/udfs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/udfs.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/udfs.js
index 466f632..9093197 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/udfs.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/udfs.js
@@ -25,6 +25,8 @@ export default Ember.Route.extend({
 
     this.store.find(constants.namingConventions.fileResource).then(function (fileResources) {
       self.controllerFor(constants.namingConventions.fileResources).set('model', fileResources);
-    });
+    }).catch(function (err) {
+      self.notify.error(err.responseJSON.message, err.responseJSON.trace);
+    });
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
index 6264b9e..3ae64ec 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
@@ -205,7 +205,7 @@ aside  {
   color: green;
 }
 
-.CANCELED, .ERROR {
+.CANCELED, .ERROR, .FAILED, .KILLED {
   color: red;
 }
 
@@ -338,6 +338,11 @@ body {
     width: 20px;
     height: 20px;
   }
+
+  &.inline-spinner {
+    vertical-align: middle;
+    display: inline-block;
+  }
 }
 
 .databases {
@@ -521,14 +526,14 @@ tree-view ul li {
         border-radius: 0;
         margin: 0 10px 10px 10px;
       }
-
-      .progress-bar {
-        min-width: 2em;
-      }
     }
   }
 }
 
+.progress-bar {
+  min-width: 2em;
+}
+
 .messages-controls {
   margin: 0 0 10px;
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs
index 313a233..d98fbcd 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs
@@ -60,7 +60,14 @@
             {{code-helper controller.file.fileContent}}
 
             {{#if controller.canStop}}
-              <button type="button" class="btn btn-danger btn-sm pull-right" {{action "stop"}}>{{t "buttons.stopJob"}}</button>
+              <button type="button" class="btn btn-danger btn-sm pull-right" {{action "stop"}}>
+                {{#if item.isCancelling}}
+                  {{t "buttons.stoppingJob"}}
+                  <div class="spinner small inline-spinner"></div>
+                {{else}}
+                  {{t "buttons.stopJob"}}
+                {{/if}}
+              </button>
             {{/if}}
           </td>
         </tr>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
index 70cd374..d63c5ce 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
@@ -169,6 +169,8 @@ export default Ember.Object.create({
     canceled: "CANCELED",
     closed: "CLOSED",
     error: "ERROR",
+    failed: 'FAILED',
+    killed: 'KILLED',
     pending: "PENDING"
   },
 
@@ -191,7 +193,7 @@ export default Ember.Object.create({
 
   //this can be replaced by a string.format implementation
   adapter: {
-    version: '0.3.0',
+    version: '0.4.0',
     instance: 'Hive',
     apiPrefix: '/api/v1/views/HIVE/versions/',
     instancePrefix: '/instances/',
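
Judging by the names above (apiPrefix, version, instancePrefix, instance), the adapter's buildURL most likely assembles the view's REST base URL by straight concatenation; that composition is an assumption here, since buildURL itself is not shown in this hunk. A back-of-envelope sketch:

    public class AdapterUrlSketch {
      public static void main(String[] args) {
        String apiPrefix = "/api/v1/views/HIVE/versions/";
        String version = "0.4.0";
        String instancePrefix = "/instances/";
        String instance = "Hive";
        // Assumed composition: prefix + version + instance prefix + instance name.
        System.out.println(apiPrefix + version + instancePrefix + instance);
        // -> /api/v1/views/HIVE/versions/0.4.0/instances/Hive
      }
    }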

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js
index 1585a40..52d92b6 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js
@@ -69,7 +69,7 @@ export default Ember.View.extend({
         }
       });
     });
-  }.observes('controller.verticesProgress', 'verticesGroups'),
+  }.observes('controller.verticesProgress.@each.value', 'verticesGroups'),
 
   jsonChanged: function () {
     if (this.get('controller.json')) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/ui/hive-web/config/environment.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/config/environment.js b/contrib/views/hive/src/main/resources/ui/hive-web/config/environment.js
index 86da93e..992d91c 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/config/environment.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/config/environment.js
@@ -31,6 +31,11 @@ module.exports = function(environment) {
       }
     },
 
+    contentSecurityPolicy: {
+      'connect-src': "'self' ws://localhost:35729 ws://0.0.0.0:35729",
+      'style-src': "'self' 'unsafe-inline'"
+    },
+
     APP: {
       // Here you can pass flags/options to your application instance
       // when it is created