Posted to commits@ambari.apache.org by rl...@apache.org on 2015/06/08 15:19:12 UTC

ambari git commit: AMBARI-11719. Pig View should support NN HA & Cluster Association. (Erik Bergenholtz via rlevas)

Repository: ambari
Updated Branches:
  refs/heads/trunk 0eeac02e0 -> 3602b5835


AMBARI-11719. Pig View should support NN HA & Cluster Association. (Erik Bergenholtz via rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3602b583
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3602b583
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3602b583

Branch: refs/heads/trunk
Commit: 3602b583515a1bdad4711b54779e477112f56d9f
Parents: 0eeac02
Author: Erik Bergenholtz <eb...@hortonworks.com>
Authored: Mon Jun 8 09:19:08 2015 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Mon Jun 8 09:19:18 2015 -0400

----------------------------------------------------------------------
 contrib/views/pig/pom.xml                       |  12 +-
 .../ambari/view/pig/PropertyValidator.java      | 114 ++++++
 .../view/pig/resources/files/FileService.java   |   4 +-
 .../pig/resources/jobs/JobResourceManager.java  | 103 ++---
 .../scripts/ScriptResourceManager.java          |   5 +-
 .../ambari/view/pig/services/BaseService.java   |  19 +-
 .../view/pig/templeton/client/TempletonApi.java |   5 +-
 .../templeton/client/TempletonApiFactory.java   |  56 +++
 .../ambari/view/pig/utils/FilePaginator.java    |   9 +-
 .../apache/ambari/view/pig/utils/HdfsApi.java   | 393 -------------------
 .../pig/utils/ServiceFormattedException.java    |  46 ++-
 .../ambari/view/pig/utils/UserLocalObjects.java |  81 ++++
 .../resources/ui/pig-web/app/styles/style.less  |  20 +-
 contrib/views/pig/src/main/resources/view.xml   |  91 ++++-
 .../org/apache/ambari/view/pig/BasePigTest.java |   3 +-
 .../apache/ambari/view/pig/test/FileTest.java   |   4 +-
 .../apache/ambari/view/pig/test/HelpTest.java   |   6 +-
 .../ambari/view/pig/test/IntegrationalTest.java |  37 +-
 .../apache/ambari/view/pig/test/JobTest.java    |  81 ++--
 .../apache/ambari/view/pig/test/ScriptTest.java |   7 +-
 .../view/pig/test/ScriptTestHDFSUnmanaged.java  |  12 +-
 .../view/pig/test/ScriptTestUnmanaged.java      |  10 +-
 22 files changed, 521 insertions(+), 597 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pig/pom.xml b/contrib/views/pig/pom.xml
index 7ad7597..321f032 100644
--- a/contrib/views/pig/pom.xml
+++ b/contrib/views/pig/pom.xml
@@ -19,7 +19,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.ambari.contrib.views</groupId>
   <artifactId>pig</artifactId>
-  <version>0.1.0-SNAPSHOT</version>
+  <version>1.0.0-SNAPSHOT</version>
   <name>Pig</name>
 
   <parent>
@@ -113,6 +113,16 @@
       <version>${hadoop-version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>commons-validator</groupId>
+      <artifactId>commons-validator</artifactId>
+      <version>1.4.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari.contrib.views</groupId>
+      <artifactId>ambari-views-utils</artifactId>
+      <version>0.0.1-SNAPSHOT</version>
+    </dependency>
   </dependencies>
 
   <properties>
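
The two new dependencies line up with the code changes below: commons-validator
backs the new PropertyValidator, and ambari-views-utils supplies the shared
classes that replace the view-local HDFS code. For orientation, these are the
imports the rest of this diff pulls from the shared module:

    // All of these appear in the import changes later in this commit:
    import org.apache.ambari.view.utils.hdfs.HdfsApi;          // replaces org.apache.ambari.view.pig.utils.HdfsApi
    import org.apache.ambari.view.utils.hdfs.HdfsUtil;         // HdfsUtil.connectToHDFSApi(context)
    import org.apache.ambari.view.utils.hdfs.HdfsApiException;
    import org.apache.ambari.view.utils.ViewUserLocal;         // per-user instance holder
    import org.apache.ambari.view.utils.ambari.AmbariApi;      // cluster association / service URL lookup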

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/PropertyValidator.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/PropertyValidator.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/PropertyValidator.java
new file mode 100644
index 0000000..c36561f
--- /dev/null
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/PropertyValidator.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.pig;
+
+import org.apache.ambari.view.ViewInstanceDefinition;
+import org.apache.ambari.view.validation.ValidationResult;
+import org.apache.ambari.view.validation.Validator;
+import org.apache.commons.validator.routines.RegexValidator;
+import org.apache.commons.validator.routines.UrlValidator;
+
+public class PropertyValidator implements Validator {
+
+  public static final String WEBHDFS_URL = "webhdfs.url";
+  public static final String WEBHCAT_PORT = "webhcat.port";
+
+  @Override
+  public ValidationResult validateInstance(ViewInstanceDefinition viewInstanceDefinition, ValidationContext validationContext) {
+    return null;
+  }
+
+  @Override
+  public ValidationResult validateProperty(String property, ViewInstanceDefinition viewInstanceDefinition, ValidationContext validationContext) {
+    // 1. Validate non cluster associated properties
+    // no properties
+
+    // 2. if associated with cluster, no need to validate associated properties
+    String cluster = viewInstanceDefinition.getClusterHandle();
+    if (cluster != null) {
+      return ValidationResult.SUCCESS;
+    }
+
+    // 3. Cluster associated properties
+    if (property.equals(WEBHDFS_URL)) {
+      String webhdfsUrl = viewInstanceDefinition.getPropertyMap().get(WEBHDFS_URL);
+      if (!validateHdfsURL(webhdfsUrl)) {
+        return new InvalidPropertyValidationResult(false, "Must be valid URL");
+      }
+    }
+
+    if (property.equals(WEBHCAT_PORT)) {
+      String webhcatPort = viewInstanceDefinition.getPropertyMap().get(WEBHCAT_PORT);
+      if (webhcatPort != null) {
+        try {
+          int port = Integer.valueOf(webhcatPort);
+          if (port < 1 || port > 65535) {
+            return new InvalidPropertyValidationResult(false, "Must be from 1 to 65535");
+          }
+        } catch (NumberFormatException e) {
+          return new InvalidPropertyValidationResult(false, "Must be integer");
+        }
+      }
+    }
+
+    return ValidationResult.SUCCESS;
+  }
+
+  /**
+   * Validates filesystem URL
+   * @param webhdfsUrl url
+   * @return is url valid
+   */
+  private boolean validateHdfsURL(String webhdfsUrl) {
+    String[] schemes = {"webhdfs", "hdfs", "s3", "file"};
+    return validateURL(webhdfsUrl, schemes);
+  }
+
+  private boolean validateHttpURL(String webhdfsUrl) {
+    String[] schemes = {"http", "https"};
+    return validateURL(webhdfsUrl, schemes);
+  }
+
+  private boolean validateURL(String webhdfsUrl, String[] schemes) {
+    RegexValidator authority = new RegexValidator(".*");
+    UrlValidator urlValidator = new UrlValidator(schemes, authority, UrlValidator.ALLOW_LOCAL_URLS);
+    return urlValidator.isValid(webhdfsUrl);
+  }
+
+  public static class InvalidPropertyValidationResult implements ValidationResult {
+    private boolean valid;
+    private String detail;
+
+    public InvalidPropertyValidationResult(boolean valid, String detail) {
+      this.valid = valid;
+      this.detail = detail;
+    }
+
+    @Override
+    public boolean isValid() {
+      return valid;
+    }
+
+    @Override
+    public String getDetail() {
+      return detail;
+    }
+  }
+
+}
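
PropertyValidator only runs these checks when the instance is not associated
with a cluster; with a cluster handle set, Ambari supplies the values itself.
A minimal standalone sketch of what validateURL() accepts, using the same
commons-validator 1.4.0 configuration (the sample URLs are illustrative):

    import org.apache.commons.validator.routines.RegexValidator;
    import org.apache.commons.validator.routines.UrlValidator;

    public class UrlCheckDemo {
      public static void main(String[] args) {
        // Same setup as PropertyValidator.validateURL(): filesystem schemes only,
        // any authority, local (no-dot) host names allowed.
        String[] schemes = {"webhdfs", "hdfs", "s3", "file"};
        UrlValidator v = new UrlValidator(schemes, new RegexValidator(".*"),
            UrlValidator.ALLOW_LOCAL_URLS);

        System.out.println(v.isValid("webhdfs://namenode:50070")); // true
        System.out.println(v.isValid("http://namenode:50070"));    // false: scheme not in list
        System.out.println(v.isValid("not a url"));                // false
      }
    }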

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
index 6a2628d..6697160 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
@@ -23,6 +23,8 @@ import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.pig.services.BaseService;
 import org.apache.ambari.view.pig.utils.*;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
@@ -179,7 +181,7 @@ public class FileService extends BaseService {
    */
   public static void hdfsSmokeTest(ViewContext context) {
     try {
-      HdfsApi api = HdfsApi.connectToHDFSApi(context);
+      HdfsApi api = HdfsUtil.connectToHDFSApi(context);
       api.getStatus();
     } catch (WebApplicationException ex) {
       throw ex;

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/jobs/JobResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/jobs/JobResourceManager.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/jobs/JobResourceManager.java
index 19cf69f..5553255 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/jobs/JobResourceManager.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/jobs/JobResourceManager.java
@@ -24,11 +24,12 @@ import org.apache.ambari.view.pig.persistence.utils.Indexed;
 import org.apache.ambari.view.pig.resources.PersonalCRUDResourceManager;
 import org.apache.ambari.view.pig.resources.jobs.models.PigJob;
 import org.apache.ambari.view.pig.resources.jobs.utils.JobPolling;
-import org.apache.ambari.view.pig.services.BaseService;
 import org.apache.ambari.view.pig.templeton.client.TempletonApi;
-import org.apache.ambari.view.pig.utils.HdfsApi;
+import org.apache.ambari.view.pig.templeton.client.TempletonApiFactory;
 import org.apache.ambari.view.pig.utils.MisconfigurationFormattedException;
 import org.apache.ambari.view.pig.utils.ServiceFormattedException;
+import org.apache.ambari.view.pig.utils.UserLocalObjects;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -58,26 +59,8 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
    */
   public JobResourceManager(ViewContext context) {
     super(PigJob.class, context);
-    setupPolling();
-  }
 
-  /**
-   * Get templeton api business delegate
-   * @return templeton api business delegate
-   */
-  public TempletonApi getTempletonApi() {
-    if (api == null) {
-      api = connectToTempletonApi(context);
-    }
-    return api;
-  }
-
-  /**
-   * Set templeton api business delegate
-   * @param api templeton api business delegate
-   */
-  public void setTempletonApi(TempletonApi api) {
-    this.api = api;
+    setupPolling();
   }
 
   private void setupPolling() {
@@ -124,7 +107,7 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
 
     if (object.getJobId() != null) {
       try {
-        getTempletonApi().killJob(object.getJobId());
+        UserLocalObjects.getTempletonApi(context).killJob(object.getJobId());
       } catch (IOException e) {
         LOG.debug("Job kill FAILED");
         throw e;
@@ -153,6 +136,7 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
     String newSourceFilePath = storedir + "/source.pig";
     String newPythonScriptPath = storedir + "/udf.py";
     String templetonParamsFilePath = storedir + "/params";
+
     try {
       // additional file can be passed to copy into work directory
       if (job.getSourceFileContent() != null && !job.getSourceFileContent().isEmpty()) {
@@ -160,22 +144,22 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
         job.setSourceFileContent(null); // we should not store content in DB
         save(job);
 
-        FSDataOutputStream stream = HdfsApi.getInstance(context).create(newSourceFilePath, true);
+        FSDataOutputStream stream = UserLocalObjects.getHdfsApi(context).create(newSourceFilePath, true);
         stream.writeBytes(sourceFileContent);
         stream.close();
       } else {
         if (job.getSourceFile() != null && !job.getSourceFile().isEmpty()) {
           // otherwise, just copy original file
-          if (!HdfsApi.getInstance(context).copy(job.getSourceFile(), newSourceFilePath)) {
-            throw new ServiceFormattedException("Can't copy source file from " + job.getSourceFile() +
-                " to " + newPigScriptPath);
-          }
+          UserLocalObjects.getHdfsApi(context).copy(job.getSourceFile(), newSourceFilePath);
         }
       }
     } catch (IOException e) {
       throw new ServiceFormattedException("Can't create/copy source file: " + e.toString(), e);
     } catch (InterruptedException e) {
       throw new ServiceFormattedException("Can't create/copy source file: " + e.toString(), e);
+    } catch (HdfsApiException e) {
+      throw new ServiceFormattedException("Can't copy source file from " + job.getSourceFile() +
+          " to " + newPigScriptPath, e);
     }
 
     try {
@@ -188,16 +172,16 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
         job.setForcedContent(null); // we should not store content in DB
         save(job);
 
-        FSDataOutputStream stream = HdfsApi.getInstance(context).create(newPigScriptPath, true);
+        FSDataOutputStream stream = UserLocalObjects.getHdfsApi(context).create(newPigScriptPath, true);
         stream.writeBytes(forcedContent);
         stream.close();
       } else {
         // otherwise, just copy original file
-        if (!HdfsApi.getInstance(context).copy(job.getPigScript(), newPigScriptPath)) {
-          throw new ServiceFormattedException("Can't copy pig script file from " + job.getPigScript() +
-              " to " + newPigScriptPath);
-        }
+        UserLocalObjects.getHdfsApi(context).copy(job.getPigScript(), newPigScriptPath);
       }
+    } catch (HdfsApiException e) {
+      throw new ServiceFormattedException("Can't copy pig script file from " + job.getPigScript() +
+          " to " + newPigScriptPath, e);
     } catch (IOException e) {
       throw new ServiceFormattedException("Can't create/copy pig script file: " + e.toString(), e);
     } catch (InterruptedException e) {
@@ -206,10 +190,10 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
 
     if (job.getPythonScript() != null && !job.getPythonScript().isEmpty()) {
       try {
-        if (!HdfsApi.getInstance(context).copy(job.getPythonScript(), newPythonScriptPath)) {
-          throw new ServiceFormattedException("Can't copy python udf script file from " + job.getPythonScript() +
-              " to " + newPythonScriptPath);
-        }
+        UserLocalObjects.getHdfsApi(context).copy(job.getPythonScript(), newPythonScriptPath);
+      } catch (HdfsApiException e) {
+        throw new ServiceFormattedException("Can't copy python udf script file from " + job.getPythonScript() +
+            " to " + newPythonScriptPath);
       } catch (IOException e) {
         throw new ServiceFormattedException("Can't create/copy python udf file: " + e.toString(), e);
       } catch (InterruptedException e) {
@@ -218,7 +202,7 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
     }
 
     try {
-      FSDataOutputStream stream = HdfsApi.getInstance(context).create(templetonParamsFilePath, true);
+      FSDataOutputStream stream = UserLocalObjects.getHdfsApi(context).create(templetonParamsFilePath, true);
       if (job.getTempletonArguments() != null) {
         stream.writeBytes(job.getTempletonArguments());
       }
@@ -233,9 +217,9 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
     job.setStatusDir(statusdir);
     job.setDateStarted(System.currentTimeMillis() / 1000L);
 
-    TempletonApi.JobData data = null;
+    TempletonApi.JobData data;
     try {
-      data = getTempletonApi().runPigQuery(new File(job.getPigScript()), statusdir, job.getTempletonArguments());
+      data = UserLocalObjects.getTempletonApi(context).runPigQuery(new File(job.getPigScript()), statusdir, job.getTempletonArguments());
     } catch (IOException templetonBadResponse) {
       String msg = String.format("Templeton bad response: %s", templetonBadResponse.toString());
       LOG.debug(msg);
@@ -251,9 +235,9 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
    * @param job job object
    */
   public void retrieveJobStatus(PigJob job) {
-    TempletonApi.JobInfo info = null;
+    TempletonApi.JobInfo info;
     try {
-      info = getTempletonApi().checkJob(job.getJobId());
+      info = UserLocalObjects.getTempletonApi(context).checkJob(job.getJobId());
     } catch (IOException e) {
       LOG.warn(String.format("IO Exception: %s", e));
       return;
@@ -268,27 +252,27 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
       switch (runState) {
         case RUN_STATE_KILLED:
           LOG.debug(String.format("Job KILLED: %s", job.getJobId()));
-          isStatusChanged = job.getStatus() != PigJob.PIG_JOB_STATE_KILLED;
+          isStatusChanged = !job.getStatus().equals(PigJob.PIG_JOB_STATE_KILLED);
           job.setStatus(PigJob.PIG_JOB_STATE_KILLED);
           break;
         case RUN_STATE_FAILED:
           LOG.debug(String.format("Job FAILED: %s", job.getJobId()));
-          isStatusChanged = job.getStatus() != PigJob.PIG_JOB_STATE_FAILED;
+          isStatusChanged = !job.getStatus().equals(PigJob.PIG_JOB_STATE_FAILED);
           job.setStatus(PigJob.PIG_JOB_STATE_FAILED);
           break;
         case RUN_STATE_PREP:
         case RUN_STATE_RUNNING:
-          isStatusChanged = job.getStatus() != PigJob.PIG_JOB_STATE_RUNNING;
+          isStatusChanged = !job.getStatus().equals(PigJob.PIG_JOB_STATE_RUNNING);
           job.setStatus(PigJob.PIG_JOB_STATE_RUNNING);
           break;
         case RUN_STATE_SUCCEEDED:
           LOG.debug(String.format("Job COMPLETED: %s", job.getJobId()));
-          isStatusChanged = job.getStatus() != PigJob.PIG_JOB_STATE_COMPLETED;
+          isStatusChanged = !job.getStatus().equals(PigJob.PIG_JOB_STATE_COMPLETED);
           job.setStatus(PigJob.PIG_JOB_STATE_COMPLETED);
           break;
         default:
           LOG.debug(String.format("Job in unknown state: %s", job.getJobId()));
-          isStatusChanged = job.getStatus() != PigJob.PIG_JOB_STATE_UNKNOWN;
+          isStatusChanged = !job.getStatus().equals(PigJob.PIG_JOB_STATE_UNKNOWN);
           job.setStatus(PigJob.PIG_JOB_STATE_UNKNOWN);
           break;
       }
@@ -315,7 +299,8 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
    */
   public static void webhcatSmokeTest(ViewContext context) {
     try {
-      TempletonApi api = connectToTempletonApi(context);
+      TempletonApiFactory templetonApiFactory = new TempletonApiFactory(context);
+      TempletonApi api = templetonApiFactory.connectToTempletonApi();
       api.status();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -324,30 +309,6 @@ public class JobResourceManager extends PersonalCRUDResourceManager<PigJob> {
     }
   }
 
-  private static TempletonApi connectToTempletonApi(ViewContext context) {
-    String webhcatUrl = context.getProperties().get("webhcat.url");
-    if (webhcatUrl == null) {
-      String message = "webhcat.url is not configured!";
-      LOG.error(message);
-      throw new MisconfigurationFormattedException("webhcat.url");
-    }
-    return new TempletonApi(context.getProperties().get("webhcat.url"),
-        getTempletonUser(context), context);
-  }
-
-  /**
-   * Extension point to use different usernames in templeton
-   * requests instead of logged in user
-   * @return username in templeton
-   */
-  private static String getTempletonUser(ViewContext context) {
-    String username = context.getProperties().get("webhcat.username");
-    if (username == null || username.compareTo("null") == 0 || username.compareTo("") == 0) {
-      username = getUsername(context);
-    }
-    return username;
-  }
-
   public static final int RUN_STATE_RUNNING = 1;
   public static final int RUN_STATE_SUCCEEDED = 2;
   public static final int RUN_STATE_FAILED = 3;
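
Two things are worth noting in this file. First, the Templeton delegate and the
HdfsApi are no longer cached per resource manager; every call goes through
UserLocalObjects, which keys instances per user (see UserLocalObjects.java
below). Second, the status comparisons switch from != to equals(), which
matters if the PIG_JOB_STATE_* constants are object values rather than interned
primitives. A minimal sketch of the difference, with a hypothetical value:

    String status = new String("RUNNING");          // hypothetical, not the actual PigJob field
    System.out.println(status != "RUNNING");        // true  -- different references, same text
    System.out.println(!status.equals("RUNNING"));  // false -- value comparison, as intended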

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/scripts/ScriptResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/scripts/ScriptResourceManager.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/scripts/ScriptResourceManager.java
index 9a56bb4..3d9d13e 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/scripts/ScriptResourceManager.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/scripts/ScriptResourceManager.java
@@ -22,10 +22,9 @@ import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.pig.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.pig.resources.PersonalCRUDResourceManager;
 import org.apache.ambari.view.pig.resources.scripts.models.PigScript;
-import org.apache.ambari.view.pig.services.BaseService;
-import org.apache.ambari.view.pig.utils.HdfsApi;
 import org.apache.ambari.view.pig.utils.MisconfigurationFormattedException;
 import org.apache.ambari.view.pig.utils.ServiceFormattedException;
+import org.apache.ambari.view.pig.utils.UserLocalObjects;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.slf4j.Logger;
@@ -78,7 +77,7 @@ public class ScriptResourceManager extends PersonalCRUDResourceManager<PigScript
       LOG.debug("Trying to create new file " + newFilePath);
 
       try {
-        FSDataOutputStream stream = HdfsApi.getInstance(context).create(newFilePath, false);
+        FSDataOutputStream stream = UserLocalObjects.getHdfsApi(context).create(newFilePath, false);
         stream.close();
         fileCreated = true;
         LOG.debug("File created successfully!");

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/services/BaseService.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/services/BaseService.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/services/BaseService.java
index ec15c37..7d3508a 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/services/BaseService.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/services/BaseService.java
@@ -21,19 +21,12 @@ package org.apache.ambari.view.pig.services;
 import com.google.inject.Inject;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.pig.persistence.Storage;
-import org.apache.ambari.view.pig.utils.HdfsApi;
 import org.apache.ambari.view.pig.persistence.utils.StorageUtil;
-import org.apache.ambari.view.pig.utils.MisconfigurationFormattedException;
-import org.apache.ambari.view.pig.utils.ServiceFormattedException;
-import org.json.simple.JSONObject;
+import org.apache.ambari.view.pig.utils.UserLocalObjects;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.util.HashMap;
-
 
 /**
  * Parent service
@@ -58,12 +51,8 @@ public class BaseService {
     this.storage = storage;
   }
 
-  private HdfsApi hdfsApi = null;
-
   protected HdfsApi getHdfsApi()  {
-    if (hdfsApi == null)
-      hdfsApi = HdfsApi.getInstance(context);
-    return hdfsApi;
+    return UserLocalObjects.getHdfsApi(context);
   }
 
   /**
@@ -71,6 +60,6 @@ public class BaseService {
    * @param api HdfsApi instance
    */
   public void setHdfsApi(HdfsApi api)  {
-    hdfsApi = api;
+    UserLocalObjects.setHdfsApi(api, context);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java
index 497ec82..66568d7 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApi.java
@@ -18,7 +18,6 @@
 
 package org.apache.ambari.view.pig.templeton.client;
 
-import com.google.gson.Gson;
 import com.sun.jersey.api.client.Client;
 import com.sun.jersey.api.client.WebResource;
 import com.sun.jersey.api.client.config.ClientConfig;
@@ -38,8 +37,6 @@ import java.util.Map;
  * Templeton Business Delegate
  */
 public class TempletonApi {
-  private final Gson gson = new Gson();
-
   protected final static Logger LOG =
       LoggerFactory.getLogger(TempletonApi.class);
 
@@ -49,7 +46,7 @@ public class TempletonApi {
 
   /**
    * TempletonApi constructor
-   * @param api webhcat.url
+   * @param api webhcat url
    * @param doAs doAs argument
    * @param context context with URLStreamProvider
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApiFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApiFactory.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApiFactory.java
new file mode 100644
index 0000000..238b58f
--- /dev/null
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/TempletonApiFactory.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.pig.templeton.client;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.pig.utils.MisconfigurationFormattedException;
+import org.apache.ambari.view.utils.ambari.AmbariApi;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TempletonApiFactory {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(TempletonApiFactory.class);
+
+  private ViewContext context;
+  private AmbariApi ambariApi;
+
+  public TempletonApiFactory(ViewContext context) {
+    this.context = context;
+    this.ambariApi = new AmbariApi(context);
+  }
+
+  public TempletonApi connectToTempletonApi() {
+    String webhcatUrl = ambariApi.getServices().getWebHCatURL();
+    return new TempletonApi(webhcatUrl, getTempletonUser(context), context);
+  }
+
+  /**
+   * Extension point to use different usernames in templeton
+   * requests instead of logged in user
+   * @return username in templeton
+   */
+  private String getTempletonUser(ViewContext context) {
+    String username = context.getProperties().get("webhcat.username");
+    if (username == null || username.compareTo("null") == 0 || username.compareTo("") == 0) {
+      username = context.getUsername();
+    }
+    return username;
+  }
+}
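
The factory replaces the webhcat.url property lookup removed from
JobResourceManager: the URL now comes from AmbariApi, which presumably resolves
it from the associated cluster or from the new webhcat.hostname/webhcat.port
parameters in view.xml. Typical use, matching webhcatSmokeTest() above:

    TempletonApiFactory factory = new TempletonApiFactory(context);
    TempletonApi api = factory.connectToTempletonApi();
    api.status();  // smoke-tests the WebHCat endpoint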

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java
index 9ebbb80..2022769 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/FilePaginator.java
@@ -19,7 +19,6 @@
 package org.apache.ambari.view.pig.utils;
 
 import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.pig.services.BaseService;
 import org.apache.hadoop.fs.FSDataInputStream;
 
 import java.io.IOException;
@@ -35,7 +34,7 @@ public class FilePaginator {
   private static int PAGE_SIZE = 1*1024*1024;  // 1MB
 
   private String filePath;
-  private HdfsApi hdfsApi;
+  private ViewContext context;
 
   /**
    * Constructor
@@ -44,7 +43,7 @@ public class FilePaginator {
    */
   public FilePaginator(String filePath, ViewContext context) {
     this.filePath = filePath;
-    hdfsApi = HdfsApi.getInstance(context);
+    this.context = context;
   }
 
   /**
@@ -63,7 +62,7 @@ public class FilePaginator {
    */
   public long pageCount() throws IOException, InterruptedException {
     return (long)
-        ceil( hdfsApi.getFileStatus(filePath).getLen() / ((double)PAGE_SIZE) );
+        ceil( UserLocalObjects.getHdfsApi(context).getFileStatus(filePath).getLen() / ((double)PAGE_SIZE) );
   }
 
   /**
@@ -74,7 +73,7 @@ public class FilePaginator {
    * @throws InterruptedException
    */
   public String readPage(long page) throws IOException, InterruptedException {
-    FSDataInputStream stream = hdfsApi.open(filePath);
+    FSDataInputStream stream = UserLocalObjects.getHdfsApi(context).open(filePath);
     try {
       stream.seek(page * PAGE_SIZE);
     } catch (IOException e) {
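
The paginator now resolves the HdfsApi lazily on each call instead of binding
one at construction time. The page size is fixed at 1 MB, so a 2.5 MB file
yields ceil(2.5) = 3 pages. A short usage sketch (the file path is
hypothetical; pageCount() and readPage() throw IOException and
InterruptedException):

    FilePaginator paginator = new FilePaginator("/user/admin/pig/jobs/demo/stdout", context);
    StringBuilder content = new StringBuilder();
    for (long page = 0; page < paginator.pageCount(); page++) {
      content.append(paginator.readPage(page));   // at most 1 MB per page
    }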

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/HdfsApi.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/HdfsApi.java
deleted file mode 100644
index 7af37ad..0000000
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/HdfsApi.java
+++ /dev/null
@@ -1,393 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.pig.utils;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.fs.permission.FsPermission;
-
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.URI;
-import java.security.PrivilegedExceptionAction;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.security.UserGroupInformation;
-import org.json.simple.JSONArray;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.LinkedHashMap;
-
-/**
- * HDFS Business Delegate
- */
-public class HdfsApi {
-  private Configuration conf = new Configuration();
-
-  private FileSystem fs;
-  private UserGroupInformation ugi;
-
-  private final static Logger LOG =
-      LoggerFactory.getLogger(HdfsApi.class);
-  private Map<String, String> params;
-
-  /**
-   * Constructor
-   * @param defaultFs hdfs uri
-   * @param username user.name
-   * @param params map of parameters
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public HdfsApi(final String defaultFs, String username, Map<String, String> params) throws IOException,
-      InterruptedException {
-    this.params = params;
-
-    Thread.currentThread().setContextClassLoader(null);
-    conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
-    conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
-    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
-    ugi = UserGroupInformation.createProxyUser(username, getProxyUser());
-    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
-      public FileSystem run() throws IOException {
-        return FileSystem.get(URI.create(defaultFs), conf);
-      }
-    });
-  }
-
-  private UserGroupInformation getProxyUser() throws IOException {
-    UserGroupInformation proxyuser;
-    if (params.containsKey("proxyuser")) {
-      proxyuser = UserGroupInformation.createRemoteUser(params.get("proxyuser"));
-    } else {
-      proxyuser = UserGroupInformation.getCurrentUser();
-    }
-
-    proxyuser.setAuthenticationMethod(getAuthenticationMethod());
-    return proxyuser;
-  }
-
-  private UserGroupInformation.AuthenticationMethod getAuthenticationMethod() {
-    UserGroupInformation.AuthenticationMethod authMethod;
-    if (params.containsKey("auth")) {
-      authMethod = UserGroupInformation.AuthenticationMethod.valueOf(params.get("auth"));
-    } else {
-      authMethod = UserGroupInformation.AuthenticationMethod.SIMPLE;
-    }
-    return authMethod;
-  }
-
-  /**
-   * List dir operation
-   * @param path path
-   * @return array of FileStatus objects
-   * @throws FileNotFoundException
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public FileStatus[] listdir(final String path) throws FileNotFoundException,
-      IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
-      public FileStatus[] run() throws FileNotFoundException, Exception {
-        return fs.listStatus(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Get file status
-   * @param path path
-   * @return file status
-   * @throws IOException
-   * @throws FileNotFoundException
-   * @throws InterruptedException
-   */
-  public FileStatus getFileStatus(final String path) throws IOException,
-      FileNotFoundException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
-      public FileStatus run() throws FileNotFoundException, IOException {
-        return fs.getFileStatus(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Make directory
-   * @param path path
-   * @return success
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean mkdir(final String path) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.mkdirs(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Rename
-   * @param src source path
-   * @param dst destination path
-   * @return success
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean rename(final String src, final String dst) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.rename(new Path(src), new Path(dst));
-      }
-    });
-  }
-
-  /**
-   * Delete
-   * @param path path
-   * @param recursive delete recursive
-   * @return success
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean delete(final String path, final boolean recursive)
-      throws IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.delete(new Path(path), recursive);
-      }
-    });
-  }
-
-  /**
-   * Home directory
-   * @return home directory
-   * @throws Exception
-   */
-  public Path getHomeDir() throws Exception {
-    return ugi.doAs(new PrivilegedExceptionAction<Path>() {
-      public Path run() throws IOException {
-        return fs.getHomeDirectory();
-      }
-    });
-  }
-
-  /**
-   * Hdfs Status
-   * @return home directory
-   * @throws Exception
-   */
-  public FsStatus getStatus() throws Exception {
-    return ugi.doAs(new PrivilegedExceptionAction<FsStatus>() {
-      public FsStatus run() throws IOException {
-        return fs.getStatus();
-      }
-    });
-  }
-
-  /**
-   * Create file
-   * @param path path
-   * @param overwrite overwrite existent file
-   * @return output stream
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public FSDataOutputStream create(final String path, final boolean overwrite)
-      throws IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
-      public FSDataOutputStream run() throws Exception {
-        return fs.create(new Path(path), overwrite);
-      }
-    });
-  }
-
-  /**
-   * Open file
-   * @param path path
-   * @return input stream
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public FSDataInputStream open(final String path) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
-      public FSDataInputStream run() throws Exception {
-        return fs.open(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Copy file
-   * @param src source path
-   * @param dest destination path
-   * @return success
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean copy(final String src, final String dest) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return FileUtil.copy(fs, new Path(src), fs, new Path(dest), false, conf);
-      }
-    });
-  }
-
-  /**
-   * Converts a Hadoop permission into a Unix permission symbolic representation
-   * (i.e. -rwxr--r--) or default if the permission is NULL.
-   *
-   * @param p
-   *          Hadoop permission.
-   * @return the Unix permission symbolic representation or default if the
-   *         permission is NULL.
-   */
-  private static String permissionToString(FsPermission p) {
-    return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
-        + p.getGroupAction().SYMBOL + p.getOtherAction().SYMBOL;
-  }
-
-  /**
-   * Converts a Hadoop <code>FileStatus</code> object into a JSON array object.
-   * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
-   * specified URL.
-   * <p/>
-   *
-   * @param status
-   *          Hadoop file status.
-   * @return The JSON representation of the file status.
-   */
-
-  public static Map<String, Object> fileStatusToJSON(FileStatus status) {
-    Map<String, Object> json = new LinkedHashMap<String, Object>();
-    json.put("path", status.getPath().toString());
-    json.put("isDirectory", status.isDirectory());
-    json.put("len", status.getLen());
-    json.put("owner", status.getOwner());
-    json.put("group", status.getGroup());
-    json.put("permission", permissionToString(status.getPermission()));
-    json.put("accessTime", status.getAccessTime());
-    json.put("modificationTime", status.getModificationTime());
-    json.put("blockSize", status.getBlockSize());
-    json.put("replication", status.getReplication());
-    return json;
-  }
-
-  /**
-   * Converts a Hadoop <code>FileStatus</code> array into a JSON array object.
-   * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
-   * specified URL.
-   * <p/>
-   *
-   * @param status
-   *          Hadoop file status array.
-   * @return The JSON representation of the file status array.
-   */
-  @SuppressWarnings("unchecked")
-  public static JSONArray fileStatusToJSON(FileStatus[] status) {
-    JSONArray json = new JSONArray();
-    if (status != null) {
-      for (FileStatus s : status) {
-        json.add(fileStatusToJSON(s));
-      }
-    }
-    return json;
-  }
-
-
-  private static Map<String, HdfsApi> viewSingletonObjects = new HashMap<String, HdfsApi>();
-  /**
-   * Returns HdfsApi object specific to instance
-   * @param context View Context instance
-   * @return Hdfs business delegate object
-   */
-  public static HdfsApi getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), connectToHDFSApi(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-
-  public static void setInstance(ViewContext context, HdfsApi api) {
-    viewSingletonObjects.put(context.getInstanceName(), api);
-  }
-
-  public static HdfsApi connectToHDFSApi(ViewContext context) {
-    HdfsApi api = null;
-    Thread.currentThread().setContextClassLoader(null);
-
-    String defaultFS = context.getProperties().get("webhdfs.url");
-    if (defaultFS == null) {
-      String message = "webhdfs.url is not configured!";
-      LOG.error(message);
-      throw new MisconfigurationFormattedException("webhdfs.url");
-    }
-
-    try {
-      api = new HdfsApi(defaultFS, getHdfsUsername(context), getHdfsAuthParams(context));
-      LOG.info("HdfsApi connected OK");
-    } catch (IOException e) {
-      String message = "HdfsApi IO error: " + e.getMessage();
-      LOG.error(message);
-      throw new ServiceFormattedException(message, e);
-    } catch (InterruptedException e) {
-      String message = "HdfsApi Interrupted error: " + e.getMessage();
-      LOG.error(message);
-      throw new ServiceFormattedException(message, e);
-    }
-    return api;
-  }
-
-  private static Map<String, String> getHdfsAuthParams(ViewContext context) {
-    String auth = context.getProperties().get("webhdfs.auth");
-    Map<String, String> params = new HashMap<String, String>();
-    if (auth == null || auth.isEmpty()) {
-      auth = "auth=SIMPLE";
-    }
-    for(String param : auth.split(";")) {
-      String[] keyvalue = param.split("=");
-      if (keyvalue.length != 2) {
-        LOG.error("Can not parse authentication param " + param + " in " + auth);
-        continue;
-      }
-      params.put(keyvalue[0], keyvalue[1]);
-    }
-    return params;
-  }
-
-  public static String getHdfsUsername(ViewContext context) {
-    String userName = context.getProperties().get("webhdfs.username");
-    if (userName == null || userName.compareTo("null") == 0 || userName.compareTo("") == 0)
-      userName = context.getUsername();
-    return userName;
-  }
-
-  public static void dropAllConnections() {
-    viewSingletonObjects.clear();
-  }
-}
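
This class is deleted outright; FileService and UserLocalObjects above already
import its replacement from org.apache.ambari.view.utils.hdfs. The essential
pattern it carried, and which the shared module presumably keeps, is running
every filesystem call as a proxy of the view user (username, defaultFs, and
conf as in the deleted constructor):

    UserGroupInformation proxy = UserGroupInformation.createProxyUser(
        username, UserGroupInformation.getCurrentUser());
    FileSystem fs = proxy.doAs(new PrivilegedExceptionAction<FileSystem>() {
      public FileSystem run() throws IOException {
        return FileSystem.get(URI.create(defaultFs), conf);  // connect as the proxied user
      }
    });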

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/ServiceFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/ServiceFormattedException.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/ServiceFormattedException.java
index 6e689c6..7b57bb2 100644
--- a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/ServiceFormattedException.java
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/ServiceFormattedException.java
@@ -18,7 +18,6 @@
 
 package org.apache.ambari.view.pig.utils;
 
-import org.apache.commons.lang.exception.ExceptionUtils;
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -26,6 +25,8 @@ import org.slf4j.LoggerFactory;
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import java.io.PrintWriter;
+import java.io.StringWriter;
 import java.security.AccessControlException;
 import java.util.HashMap;
 
@@ -33,6 +34,10 @@ public class ServiceFormattedException extends WebApplicationException {
   private final static Logger LOG =
       LoggerFactory.getLogger(ServiceFormattedException.class);
 
+  public ServiceFormattedException(Throwable e) {
+    super(errorEntity(null, e, suggestStatus(e)));
+  }
+
   public ServiceFormattedException(String message) {
     super(errorEntity(message, null, suggestStatus(null)));
   }
@@ -47,7 +52,9 @@ public class ServiceFormattedException extends WebApplicationException {
 
   private static int suggestStatus(Throwable exception) {
     int status = 500;
-    if (exception == null) return status;
+    if (exception == null) {
+      return status;
+    }
     if (exception instanceof AccessControlException) {
       status = 403;
     }
@@ -56,16 +63,39 @@ public class ServiceFormattedException extends WebApplicationException {
 
   protected static Response errorEntity(String message, Throwable e, int status) {
     HashMap<String, Object> response = new HashMap<String, Object>();
-    response.put("message", message);
+
     String trace = null;
-    if (e != null)
-      trace = ExceptionUtils.getStackTrace(e);
+
+    response.put("message", message);
+    if (e != null) {
+      trace = e.toString() + "\n\n";
+      StringWriter sw = new StringWriter();
+      e.printStackTrace(new PrintWriter(sw));
+      trace += sw.toString();
+
+      if (message == null) {
+        String innerMessage = e.getMessage();
+        String autoMessage;
+
+        if (innerMessage != null) {
+          autoMessage = String.format("%s [%s]", innerMessage, e.getClass().getSimpleName());
+        } else {
+          autoMessage = e.getClass().getSimpleName();
+        }
+        response.put("message", autoMessage);
+      }
+    }
     response.put("trace", trace);
     response.put("status", status);
 
-    if(message != null) LOG.error(message);
-    if(trace != null) LOG.debug(trace);
+    if(message != null) {
+      LOG.error(message);
+    }
+    if(trace != null) {
+      LOG.error(trace);
+    }
 
-    return Response.status(status).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON).build();
+    Response.ResponseBuilder responseBuilder = Response.status(status).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON);
+    return responseBuilder.build();
   }
 }
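
The new Throwable-only constructor derives the message from the cause, and
suggestStatus() maps AccessControlException to 403. For example:

    // java.security.AccessControlException is already imported in this file
    throw new ServiceFormattedException(new AccessControlException("Permission denied"));
    // -> status 403, message "Permission denied [AccessControlException]",
    //    trace = toString() plus the full stack trace, logged at ERROR level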

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java
new file mode 100644
index 0000000..1b22a45
--- /dev/null
+++ b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.pig.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.pig.templeton.client.TempletonApi;
+import org.apache.ambari.view.pig.templeton.client.TempletonApiFactory;
+import org.apache.ambari.view.utils.ViewUserLocal;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UserLocalObjects {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(UserLocalObjects.class);
+
+  /**
+   * HdfsApi user-local instance
+   */
+  private static ViewUserLocal<HdfsApi> hdfsApi;
+
+  /**
+   * TempletonApi user-local instance
+   */
+  private static ViewUserLocal<TempletonApi> templetonApi;
+
+  static {
+    templetonApi = new ViewUserLocal<TempletonApi>(TempletonApi.class) {
+      @Override
+      protected synchronized TempletonApi initialValue(ViewContext context) {
+        TempletonApiFactory templetonApiFactory = new TempletonApiFactory(context);
+        return templetonApiFactory.connectToTempletonApi();
+      }
+    };
+
+    hdfsApi = new ViewUserLocal<HdfsApi>(HdfsApi.class) {
+      @Override
+      protected synchronized HdfsApi initialValue(ViewContext context) {
+        try {
+          return HdfsUtil.connectToHDFSApi(context);
+        } catch (HdfsApiException e) {
+          throw new ServiceFormattedException(e);
+        }
+      }
+    };
+  }
+
+  public static HdfsApi getHdfsApi(ViewContext context) {
+    return hdfsApi.get(context);
+  }
+
+  public static void setHdfsApi(HdfsApi api, ViewContext context) {
+    hdfsApi.set(api, context);
+  }
+
+  public static TempletonApi getTempletonApi(ViewContext context) {
+    return templetonApi.get(context);
+  }
+
+  public static void setTempletonApi(TempletonApi api, ViewContext context) {
+    templetonApi.set(api, context);
+  }
+}
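
UserLocalObjects is the new home for the per-user singletons that
HdfsApi.getInstance() and JobResourceManager.getTempletonApi() used to manage
by hand. Callers fetch (and tests inject) instances through the view context:

    HdfsApi hdfs = UserLocalObjects.getHdfsApi(context);             // lazily connected per user
    TempletonApi templeton = UserLocalObjects.getTempletonApi(context);

    // Tests can swap in a mock for the current user (as BaseService.setHdfsApi does):
    UserLocalObjects.setHdfsApi(mockHdfsApi, context);               // mockHdfsApi: a test double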

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/resources/ui/pig-web/app/styles/style.less
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/resources/ui/pig-web/app/styles/style.less b/contrib/views/pig/src/main/resources/ui/pig-web/app/styles/style.less
index 6a0e62c..652cc9b 100644
--- a/contrib/views/pig/src/main/resources/ui/pig-web/app/styles/style.less
+++ b/contrib/views/pig/src/main/resources/ui/pig-web/app/styles/style.less
@@ -24,7 +24,7 @@
 @import './../../bower_components/bootstrap/less/bootstrap.less';
 
 .wrap {
-  padding: 0;
+  padding: 0px;
 }
 
 .moz-padding (@selector, @pre: ~'', @padding) {
@@ -125,7 +125,7 @@
       left: 102%;
       position: relative;
       height: 100%;
-      box-shadow: -2px 0 1px rgba(0, 0, 0, 0.20);
+      box-shadow: -2px 0px 1px rgba(0, 0, 0, 0.20);
       .base-transition( left 300ms linear;);
 
       .nav-script {
@@ -183,8 +183,9 @@
     }
   }
   .pig-nav-item {
-    word-wrap: break-word;
+    word-break: break-word;
     .rm {
+      top:2px;
       position: absolute;
       right: 5px;
       top: 5px;
@@ -207,9 +208,10 @@
     position: absolute;
     color: @brand-primary;
     top: 1px;
+    padding: 0px;
     background: transparent;
     right: 3px;
-    padding: 0 4px;
+    padding: 0px 4px;
   }
 }
 
@@ -345,7 +347,7 @@
       position: absolute;
       cursor: s-resize;
       color: #999999;
-      z-index: 2;
+      zndex: 2;
     }
     .table-results {
       .argument {
@@ -465,7 +467,7 @@
       .panel-heading {
         position: fixed;
         top: 0;
-        right: 0;
+        right: 0px;
         left: 0;
         z-index: 77;
         border-top: 1px solid #ddd;
@@ -475,7 +477,7 @@
       .panel-body {
         position: fixed;
         top: 55px;
-        right: 0;
+        right: 0px;
         left: 0;
         z-index: 76;
         border-right: 1px solid #ddd;
@@ -563,7 +565,7 @@
 }
 
 .main-content {
-    padding-left: 0;
+    padding-left: 0px;
 }
 
 .job-container {
@@ -624,7 +626,7 @@
       &.fullscreen {
         position: fixed;
         top: 0;
-        right: 0;
+        right: 0px;
         left: 0;
         z-index: 77;
         border-top: 1px solid #ddd;

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/resources/view.xml b/contrib/views/pig/src/main/resources/view.xml
index 197b44e..1ab4d5e 100644
--- a/contrib/views/pig/src/main/resources/view.xml
+++ b/contrib/views/pig/src/main/resources/view.xml
@@ -17,22 +17,83 @@
 <view>
     <name>PIG</name>
     <label>Pig</label>
-    <version>0.1.0</version>
+    <version>1.0.0</version>
+
     <min-ambari-version>2.0.*</min-ambari-version>
 
+    <validator-class>org.apache.ambari.view.pig.PropertyValidator</validator-class>
+
     <!-- HDFS Configs -->
     <parameter>
         <name>webhdfs.url</name>
-        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
+        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address
+            property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
         <label>WebHDFS FileSystem URI</label>
         <placeholder>webhdfs://namenode:50070</placeholder>
         <required>true</required>
+        <cluster-config>core-site/fs.defaultFS</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.nameservices</name>
+        <description>Comma-separated list of nameservices. Value of hdfs-site/dfs.nameservices property</description>
+        <label>Logical name of the NameNode cluster</label>
+        <required>false</required>
+        <cluster-config>hdfs-site/dfs.nameservices</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenodes.list</name>
+        <description>Comma-separated list of namenodes for a given nameservice.
+            Value of hdfs-site/dfs.ha.namenodes.[nameservice] property</description>
+        <label>List of NameNodes</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn1</name>
+        <description>RPC address for first name node.
+            Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn2</name>
+        <description>RPC address for second name node.
+            Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn1</name>
+        <description>WebHDFS address for the first NameNode.
+            Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn2</name>
+        <description>WebHDFS address for the second NameNode.
+            Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.client.failover.proxy.provider</name>
+        <description>The Java class that HDFS clients use to contact the Active NameNode.
+            Value of hdfs-site/dfs.client.failover.proxy.provider.[nameservice] property</description>
+        <label>Failover Proxy Provider</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
     </parameter>
 
     <parameter>
         <name>webhdfs.username</name>
         <description>User and doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
         <label>WebHDFS Username</label>
+        <default-value>${username}</default-value>
         <required>false</required>
     </parameter>
 
@@ -40,15 +101,26 @@
         <name>webhdfs.auth</name>
         <description>Semicolon-separated authentication configs. Default: auth=SIMPLE</description>
         <label>WebHDFS Authentication</label>
+        <default-value>auth=SIMPLE</default-value>
         <required>false</required>
     </parameter>
 
     <!-- WebHCat Configs -->
     <parameter>
-        <name>webhcat.url</name>
-        <description>Enter the WebHCat URL for accessing WebHCat. URL must be accessible from Ambari Server.</description>
-        <label>WebHCat URL</label>
-        <placeholder>http://webhcat.host:50111/templeton/v1</placeholder>
+        <name>webhcat.hostname</name>
+        <description>Enter the WebHCat host name for accessing WebHCat. Host must be accessible from Ambari Server.</description>
+        <label>WebHCat Hostname</label>
+        <placeholder>webhcat-host.example.com</placeholder>
+        <cluster-config>fake</cluster-config>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>webhcat.port</name>
+        <description>Enter the WebHCat port for accessing WebHCat.</description>
+        <label>WebHCat Port</label>
+        <default-value>50111</default-value>
+        <cluster-config>webhcat-site/templeton.port</cluster-config>
         <required>true</required>
     </parameter>
 
@@ -61,13 +133,6 @@
 
     <!-- General Configs -->
     <parameter>
-        <name>dataworker.username</name>
-        <description>The dataworker username. By default, users the currently logged-in Ambari user.</description>
-        <label>Dataworker Username</label>
-        <required>false</required>
-    </parameter>
-
-    <parameter>
         <name>scripts.dir</name>
         <description>HDFS directory to store Pig scripts.</description>
         <label>Scripts HDFS Directory</label>

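The new <cluster-config> elements let Ambari pre-fill these fields when the view instance is associated with a cluster (direct mappings like core-site/fs.defaultFS and webhcat-site/templeton.port are shown above; "fake" appears to mark parameters with no one-to-one config mapping), and <validator-class> wires in org.apache.ambari.view.pig.PropertyValidator to check what the user types. Purely as an illustration of the kind of checks such a validator can run, and not the code from this commit, here is a minimal sketch built on commons-validator's UrlValidator; the class and method names are hypothetical:

    import org.apache.commons.validator.routines.UrlValidator;

    /** Hypothetical sketch of view property checks; not the actual PropertyValidator. */
    public class WebHdfsPropertyCheckSketch {
      // Accept WebHDFS-style schemes alongside http(s); ALLOW_LOCAL_URLS
      // permits bare hostnames such as "namenode".
      private static final UrlValidator URL_VALIDATOR = new UrlValidator(
          new String[]{"webhdfs", "swebhdfs", "hdfs", "http", "https"},
          UrlValidator.ALLOW_LOCAL_URLS);

      public static boolean isValidWebHdfsUrl(String value) {
        return value != null && URL_VALIDATOR.isValid(value);
      }

      public static boolean isValidPort(String value) {
        try {
          int port = Integer.parseInt(value);  // e.g. "50111"
          return port > 0 && port <= 65535;
        } catch (NumberFormatException e) {
          return false;
        }
      }
    }
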
http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java
index cd9acad..4ca90a1 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/BasePigTest.java
@@ -65,7 +65,8 @@ public abstract class BasePigTest {
         .getAbsoluteFile();
 
     properties.put("dataworker.storagePath", pigStorageFile.toString());
-    properties.put("webhcat.url", "localhost:50111/templeton/v1");
+    properties.put("webhcat.hostname", "localhost/templeton/v1");
+    properties.put("webhcat.port", "50111");
     properties.put("webhcat.username", "admin");
     properties.put("scripts.dir", "/tmp/.pigscripts");
     properties.put("jobs.dir", "/tmp/.pigjobs");

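With webhcat.url split into webhcat.hostname and webhcat.port, the Templeton endpoint is now assembled inside the view rather than entered whole. A hypothetical composition, assuming the http scheme and the /templeton/v1 suffix from the old placeholder (the helper name is made up, and the test's "localhost/templeton/v1" hostname value suggests the real factory may splice the pieces differently):

    /** Hypothetical: builds the WebHCat (Templeton) base URL from the two view properties. */
    public static String buildTempletonUrl(String hostname, String port) {
      // e.g. "webhcat-host.example.com" + "50111"
      //   -> "http://webhcat-host.example.com:50111/templeton/v1"
      return String.format("http://%s:%s/templeton/v1", hostname, port);
    }
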
http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java
index ac26b93..7f0db4a 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/FileTest.java
@@ -22,6 +22,8 @@ import org.apache.ambari.view.pig.HDFSTest;
 import org.apache.ambari.view.pig.resources.files.FileResource;
 import org.apache.ambari.view.pig.resources.files.FileService;
 import org.apache.ambari.view.pig.utils.*;
+import org.apache.ambari.view.utils.ViewUserLocal;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.json.simple.JSONObject;
 import org.junit.*;
 import org.junit.rules.ExpectedException;
@@ -58,7 +60,7 @@ public class FileTest extends HDFSTest {
   @AfterClass
   public static void shutDown() throws Exception {
     HDFSTest.shutDown(); // super
-    HdfsApi.dropAllConnections(); //cleanup API connection
+    ViewUserLocal.dropAllConnections(HdfsApi.class); //cleanup API connection
   }
 
   private Response doCreateFile() throws IOException, InterruptedException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java
index c72da19..5e41aed 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/HelpTest.java
@@ -19,9 +19,9 @@
 package org.apache.ambari.view.pig.test;
 
 import org.apache.ambari.view.pig.HDFSTest;
-import org.apache.ambari.view.pig.resources.files.FileService;
 import org.apache.ambari.view.pig.services.HelpService;
-import org.apache.ambari.view.pig.utils.HdfsApi;
+import org.apache.ambari.view.utils.ViewUserLocal;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.json.simple.JSONObject;
 import org.junit.*;
 
@@ -45,7 +45,7 @@ public class HelpTest extends HDFSTest {
   @AfterClass
   public static void shutDown() throws Exception {
     HDFSTest.shutDown(); // super
-    HdfsApi.dropAllConnections(); //cleanup API connection
+    ViewUserLocal.dropAllConnections(HdfsApi.class); //cleanup API connection
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/IntegrationalTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/IntegrationalTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/IntegrationalTest.java
index 1a5fcf9..9813fea 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/IntegrationalTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/IntegrationalTest.java
@@ -24,7 +24,10 @@ import org.apache.ambari.view.pig.persistence.Storage;
 import org.apache.ambari.view.pig.persistence.utils.StorageUtil;
 import org.apache.ambari.view.pig.resources.jobs.JobService;
 import org.apache.ambari.view.pig.resources.scripts.ScriptService;
-import org.apache.ambari.view.pig.utils.HdfsApi;
+import org.apache.ambari.view.pig.templeton.client.TempletonApi;
+import org.apache.ambari.view.pig.utils.UserLocalObjects;
+import org.apache.ambari.view.utils.ViewUserLocal;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.junit.*;
 
 import static org.easymock.EasyMock.*;
@@ -39,34 +42,34 @@ public class IntegrationalTest extends HDFSTest {
 
   @BeforeClass
   public static void startUp() throws Exception {
-      HDFSTest.startUp(); // super
+    HDFSTest.startUp(); // super
   }
 
   @AfterClass
   public static void shutDown() throws Exception {
-      HDFSTest.shutDown(); // super
-      HdfsApi.dropAllConnections(); //cleanup API connection
+    HDFSTest.shutDown(); // super
+    ViewUserLocal.dropAllConnections(HdfsApi.class); //cleanup API connection
   }
 
   @Override
   @Before
   public void setUp() throws Exception {
-      super.setUp();
-      jobService = getService(JobService.class, handler, context);
-      scriptService = getService(ScriptService.class, handler, context);
+    super.setUp();
+    jobService = getService(JobService.class, handler, context);
+    scriptService = getService(ScriptService.class, handler, context);
   }
 
   @Override
   @After
   public void tearDown() throws Exception {
-      super.tearDown();
-      jobService.getResourceManager().setTempletonApi(null);
-      HdfsApi.dropAllConnections();
+    super.tearDown();
+    ViewUserLocal.dropAllConnections(TempletonApi.class);
+    ViewUserLocal.dropAllConnections(HdfsApi.class);
   }
 
   @Test
   public void testHdfsApiDependsOnInstance() throws Exception {
-    HdfsApi.dropAllConnections();
+    ViewUserLocal.dropAllConnections(HdfsApi.class); //cleanup API connection
 
     ViewContext context1 = createNiceMock(ViewContext.class);
     ViewContext context2 = createNiceMock(ViewContext.class);
@@ -85,16 +88,16 @@ public class IntegrationalTest extends HDFSTest {
 
     replay(context1, context2, context3);
 
-    HdfsApi hdfsApi1 = HdfsApi.getInstance(context1);
-    HdfsApi hdfsApi2 = HdfsApi.getInstance(context2);
+    HdfsApi hdfsApi1 = UserLocalObjects.getHdfsApi(context1);
+    HdfsApi hdfsApi2 = UserLocalObjects.getHdfsApi(context2);
     Assert.assertNotSame(hdfsApi1, hdfsApi2);
 
-    HdfsApi hdfsApi1_2 = HdfsApi.getInstance(context1);
-    HdfsApi hdfsApi2_2 = HdfsApi.getInstance(context1);
+    HdfsApi hdfsApi1_2 = UserLocalObjects.getHdfsApi(context1);
+    HdfsApi hdfsApi2_2 = UserLocalObjects.getHdfsApi(context1);
     Assert.assertSame(hdfsApi1_2, hdfsApi2_2);
 
-    HdfsApi hdfsApi1_3 = HdfsApi.getInstance(context1);
-    HdfsApi hdfsApi3_3 = HdfsApi.getInstance(context3);
+    HdfsApi hdfsApi1_3 = UserLocalObjects.getHdfsApi(context1);
+    HdfsApi hdfsApi3_3 = UserLocalObjects.getHdfsApi(context3);
     Assert.assertSame(hdfsApi1_3, hdfsApi3_3);
   }
 

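The assertions above pin down the caching contract the view now gets from ambari-views-utils: UserLocalObjects.getHdfsApi(context) hands back one HdfsApi per (view instance, user) pair and reuses it, and the assertSame result for context1 and context3 shows the key is that pair rather than the ViewContext object itself (the elided mock setup presumably gives both contexts the same instance name and user). As an illustration of those semantics only, not the library source:

    import java.util.Iterator;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    /** Illustrative only: one cached object per (class, instance, user), droppable in bulk by class. */
    public final class UserLocalCacheSketch {
      private static final Map<String, Object> CACHE = new ConcurrentHashMap<String, Object>();

      private static String key(Class<?> type, String instanceName, String userName) {
        return type.getName() + "|" + instanceName + "|" + userName;
      }

      public static Object get(Class<?> type, String instanceName, String userName) {
        return CACHE.get(key(type, instanceName, userName));
      }

      public static void put(Class<?> type, String instanceName, String userName, Object value) {
        CACHE.put(key(type, instanceName, userName), value);
      }

      /** Same observable effect the tests expect from ViewUserLocal.dropAllConnections(type). */
      public static void dropAllConnections(Class<?> type) {
        Iterator<String> keys = CACHE.keySet().iterator();
        while (keys.hasNext()) {
          if (keys.next().startsWith(type.getName() + "|")) {
            keys.remove();
          }
        }
      }
    }
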
http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java
index 7f117e9..71c8828 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/JobTest.java
@@ -24,10 +24,14 @@ import org.apache.ambari.view.pig.resources.jobs.JobService;
 import org.apache.ambari.view.pig.resources.jobs.models.PigJob;
 import org.apache.ambari.view.pig.templeton.client.TempletonApi;
 import org.apache.ambari.view.pig.utils.BadRequestFormattedException;
-import org.apache.ambari.view.pig.utils.HdfsApi;
 import org.apache.ambari.view.pig.utils.NotFoundFormattedException;
 import org.apache.ambari.view.pig.utils.ServiceFormattedException;
+import org.apache.ambari.view.pig.utils.UserLocalObjects;
+import org.apache.ambari.view.utils.ViewUserLocal;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.easymock.EasyMock;
 import org.json.simple.JSONObject;
 import org.junit.*;
 import org.junit.rules.ExpectedException;
@@ -68,8 +72,8 @@ public class JobTest extends BasePigTest {
   @After
   public void tearDown() throws Exception {
     super.tearDown();
-    jobService.getResourceManager().setTempletonApi(null);
-    HdfsApi.dropAllConnections();
+    ViewUserLocal.dropAllConnections(TempletonApi.class);
+    ViewUserLocal.dropAllConnections(HdfsApi.class);
   }
 
   public static Response doCreateJob(String title, String pigScript, String templetonArguments, JobService jobService) {
@@ -100,17 +104,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testSubmitJob() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
     replay(api);
@@ -134,17 +138,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testListJobs() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true).anyTimes();
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream).anyTimes();
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     expect(api.runPigQuery((File) anyObject(), anyString(), (String) isNull())).andReturn(data).anyTimes();
     replay(api);
@@ -174,17 +178,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testSubmitJobUsernameProvided() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
     replay(api);
@@ -199,17 +203,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testSubmitJobNoArguments() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     expect(api.runPigQuery((File) anyObject(), anyString(), (String) isNull())).andReturn(data);
     replay(api);
@@ -233,17 +237,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testSubmitJobNoFile() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
     replay(api);
@@ -264,10 +268,10 @@ public class JobTest extends BasePigTest {
     expect(hdfsApi.create(endsWith("script.pig"), eq(true))).andReturn(scriptStream);
     expect(hdfsApi.create(endsWith("params"), eq(true))).andReturn(templetonArgsStream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
     replay(api);
@@ -281,17 +285,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testSubmitJobNoTitle() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
     replay(api);
@@ -303,17 +307,18 @@ public class JobTest extends BasePigTest {
   @Test
   public void testSubmitJobFailed() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(false);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
+    EasyMock.expectLastCall().andThrow(new HdfsApiException("Copy failed"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
     replay(api);
@@ -325,17 +330,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testSubmitJobTempletonError() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     // Templeton returns 500 e.g.
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andThrow(new IOException());
@@ -348,17 +353,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testKillJobNoRemove() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createStrictMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     data.id = "job_id_##";
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
@@ -383,17 +388,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testKillJobWithRemove() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createStrictMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     data.id = "job_id_##";
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);
@@ -418,17 +423,17 @@ public class JobTest extends BasePigTest {
   @Test
   public void testJobStatusFlow() throws Exception {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"))).andReturn(true);
+    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
 
     ByteArrayOutputStream do_stream = new ByteArrayOutputStream();
 
     FSDataOutputStream stream = new FSDataOutputStream(do_stream);
     expect(hdfsApi.create(anyString(), eq(true))).andReturn(stream);
     replay(hdfsApi);
-    HdfsApi.setInstance(context, hdfsApi);
+    UserLocalObjects.setHdfsApi(hdfsApi, context);
 
     TempletonApi api = createNiceMock(TempletonApi.class);
-    jobService.getResourceManager().setTempletonApi(api);
+    UserLocalObjects.setTempletonApi(api, context);
     TempletonApi.JobData data = api.new JobData();
     data.id = "job_id_#";
     expect(api.runPigQuery((File) anyObject(), anyString(), eq("-useHCatalog"))).andReturn(data);

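A mechanical change repeated through these tests: the old HdfsApi.copy() returned boolean, so failures were staged with expect(...).andReturn(false); the utils HdfsApi's copy() is void and signals failure by throwing HdfsApiException. With EasyMock, a void method is recorded by simply invoking it, and behaviour is attached afterwards, as testSubmitJobFailed above does (static imports from org.easymock.EasyMock.* as in the tests):

    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
    // copy() is void, so there is nothing to andReturn(); the bare recorded call
    // stubs the success path. To stage a failure, attach a throw to that last call:
    hdfsApi.copy(eq("/tmp/script.pig"), startsWith("/tmp/.pigjobs/"));
    EasyMock.expectLastCall().andThrow(new HdfsApiException("Copy failed"));
    replay(hdfsApi);
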
http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTest.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTest.java
index 87e7d6b..d00b414 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTest.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTest.java
@@ -19,12 +19,11 @@
 package org.apache.ambari.view.pig.test;
 
 import org.apache.ambari.view.pig.HDFSTest;
-import org.apache.ambari.view.pig.resources.files.FileService;
 import org.apache.ambari.view.pig.resources.scripts.ScriptService;
 import org.apache.ambari.view.pig.resources.scripts.models.PigScript;
-import org.apache.ambari.view.pig.utils.BadRequestFormattedException;
-import org.apache.ambari.view.pig.utils.HdfsApi;
 import org.apache.ambari.view.pig.utils.NotFoundFormattedException;
+import org.apache.ambari.view.utils.ViewUserLocal;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.json.simple.JSONObject;
 import org.junit.*;
 import org.junit.rules.ExpectedException;
@@ -52,7 +51,7 @@ public class ScriptTest extends HDFSTest {
   @AfterClass
   public static void shutDown() throws Exception {
     HDFSTest.shutDown(); // super
-    HdfsApi.dropAllConnections(); //cleanup API connection
+    ViewUserLocal.dropAllConnections(HdfsApi.class);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java
index 32f2763..d33fd2f 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestHDFSUnmanaged.java
@@ -27,8 +27,10 @@ import org.apache.ambari.view.pig.persistence.Storage;
 import org.apache.ambari.view.pig.resources.scripts.ScriptService;
 import org.apache.ambari.view.pig.persistence.utils.StorageUtil;
 import org.apache.ambari.view.pig.resources.scripts.models.PigScript;
-import org.apache.ambari.view.pig.utils.HdfsApi;
 import org.apache.ambari.view.pig.utils.MisconfigurationFormattedException;
+import org.apache.ambari.view.utils.ViewUserLocal;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.json.simple.JSONObject;
 import org.junit.*;
 import org.junit.rules.ExpectedException;
@@ -54,7 +56,7 @@ public class ScriptTestHDFSUnmanaged extends HDFSTest {
   @AfterClass
   public static void shutDown() throws Exception {
     HDFSTest.shutDown(); // super
-    HdfsApi.dropAllConnections();
+    ViewUserLocal.dropAllConnections(HdfsApi.class);
   }
 
   @Override
@@ -63,7 +65,7 @@ public class ScriptTestHDFSUnmanaged extends HDFSTest {
     super.setUp();
     handler = createNiceMock(ViewResourceHandler.class);
     context = createNiceMock(ViewContext.class);
-    HdfsApi.dropAllConnections();
+    ViewUserLocal.dropAllConnections(HdfsApi.class);
     StorageUtil.dropAllConnections();
   }
 
@@ -149,11 +151,11 @@ public class ScriptTestHDFSUnmanaged extends HDFSTest {
     replay(context);
 
     // no webhdfs.username property
-    Assert.assertEquals("ambari-qa", HdfsApi.getHdfsUsername(context));
+    Assert.assertEquals("ambari-qa", HdfsUtil.getHdfsUsername(context));
 
     // with webhdfs.username property
     properties.put("webhdfs.username", "luke");
-    Assert.assertEquals("luke", HdfsApi.getHdfsUsername(context));
+    Assert.assertEquals("luke", HdfsUtil.getHdfsUsername(context));
   }
 
   private Response doCreateScript(String title, String path) {

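The getHdfsUsername assertions above fix the lookup order: an explicit webhdfs.username property wins, otherwise the current user is used ("ambari-qa" here presumably being the user on the mocked context in the elided setup). A sketch of that fallback, not the actual HdfsUtil source:

    import org.apache.ambari.view.ViewContext;

    /** Illustrative: resolve the HDFS user, preferring the explicit view property. */
    public static String resolveHdfsUsername(ViewContext context) {
      String username = context.getProperties().get("webhdfs.username");
      if (username == null || username.isEmpty()) {
        username = context.getUsername();  // the logged-in Ambari user
      }
      return username;
    }
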
http://git-wip-us.apache.org/repos/asf/ambari/blob/3602b583/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java
index b4af66c..8322d2c 100644
--- a/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java
+++ b/contrib/views/pig/src/test/java/org/apache/ambari/view/pig/test/ScriptTestUnmanaged.java
@@ -21,10 +21,10 @@ package org.apache.ambari.view.pig.test;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.pig.BasePigTest;
-import org.apache.ambari.view.pig.resources.files.FileService;
 import org.apache.ambari.view.pig.resources.scripts.ScriptService;
-import org.apache.ambari.view.pig.utils.HdfsApi;
-import org.apache.ambari.view.pig.utils.MisconfigurationFormattedException;
+import org.apache.ambari.view.utils.ViewUserLocal;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.junit.*;
 import org.junit.rules.ExpectedException;
 
@@ -46,7 +46,7 @@ public class ScriptTestUnmanaged extends BasePigTest {
 
   @AfterClass
   public static void shutDown() throws Exception {
-    HdfsApi.dropAllConnections(); //cleanup API connection
+    ViewUserLocal.dropAllConnections(HdfsApi.class);
   }
 
   @Before
@@ -76,7 +76,7 @@ public class ScriptTestUnmanaged extends BasePigTest {
     replay(handler, context);
     scriptService = getService(ScriptService.class, handler, context);
 
-    thrown.expect(MisconfigurationFormattedException.class);
+    thrown.expect(HdfsApiException.class);
     doCreateScript("Test", null);
   }
 }