Posted to commits@ambari.apache.org by rl...@apache.org on 2015/05/27 20:27:57 UTC

[2/2] ambari git commit: AMBARI-11391. Files View Should support NameNode HA (Erik Bergenholtz via rlevas)

AMBARI-11391. Files View Should support NameNode HA (Erik Bergenholtz via rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e28a9c07
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e28a9c07
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e28a9c07

Branch: refs/heads/trunk
Commit: e28a9c073c2cc0586d595162f53b684817b67218
Parents: 1b4bfaf
Author: Erik Bergenholtz <eb...@hortonworks.com>
Authored: Wed May 27 13:36:19 2015 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed May 27 13:36:26 2015 -0400

----------------------------------------------------------------------
 contrib/views/files/pom.xml                     | 238 +++++-----
 .../view/filebrowser/DownloadService.java       |   1 +
 .../view/filebrowser/FileOperationService.java  |   8 +-
 .../apache/ambari/view/filebrowser/HdfsApi.java | 441 ------------------
 .../ambari/view/filebrowser/HdfsService.java    |  20 +-
 .../ambari/view/filebrowser/HelpService.java    |   1 +
 .../view/filebrowser/PropertyValidator.java     |  10 +-
 .../ambari/view/filebrowser/UploadService.java  |   1 +
 contrib/views/files/src/main/resources/view.xml |  76 +++-
 .../view/filebrowser/HdfsServiceTest.java       |  49 --
 contrib/views/pom.xml                           |   2 +
 contrib/views/utils/pom.xml                     | 122 +++++
 contrib/views/utils/readme.md                   |  55 +++
 .../ambari/view/utils/ambari/AmbariApi.java     | 202 +++++++++
 .../view/utils/ambari/AmbariApiException.java   |  32 ++
 .../ambari/NoClusterAssociatedException.java    |  25 +
 .../ambari/view/utils/ambari/RemoteCluster.java | 104 +++++
 .../ambari/URLStreamProviderBasicAuth.java      |  89 ++++
 .../utils/hdfs/AuthConfigurationBuilder.java    |  98 ++++
 .../view/utils/hdfs/ConfigurationBuilder.java   | 197 ++++++++
 .../apache/ambari/view/utils/hdfs/HdfsApi.java  | 451 +++++++++++++++++++
 .../view/utils/hdfs/HdfsApiException.java       |  29 ++
 .../apache/ambari/view/utils/hdfs/HdfsUtil.java | 150 ++++++
 .../view/utils/ambari/RemoteClusterTest.java    | 137 ++++++
 .../ambari/URLStreamProviderBasicAuthTest.java  | 159 +++++++
 .../utils/hdfs/ConfigurationBuilderTest.java    |  51 +++
 26 files changed, 2094 insertions(+), 654 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/pom.xml b/contrib/views/files/pom.xml
index 5350c48..1cac902 100644
--- a/contrib/views/files/pom.xml
+++ b/contrib/views/files/pom.xml
@@ -15,107 +15,95 @@
    limitations under the License.
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <groupId>org.apache.ambari.contrib.views</groupId>
-    <artifactId>files</artifactId>
-    <version>0.1.0-SNAPSHOT</version>
-    <name>Files</name>
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.ambari.contrib.views</groupId>
+  <artifactId>files</artifactId>
+  <version>0.2.0-SNAPSHOT</version>
+  <name>Files</name>
 
-    <parent>
-        <groupId>org.apache.ambari.contrib.views</groupId>
-        <artifactId>ambari-contrib-views</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
+  <parent>
+    <groupId>org.apache.ambari.contrib.views</groupId>
+    <artifactId>ambari-contrib-views</artifactId>
+    <version>2.0.0-SNAPSHOT</version>
+  </parent>
 
-    <dependencies>
+  <dependencies>
     <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-hdfs</artifactId>
-        <version>${hadoop-version}</version>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop-version}</version>
     </dependency>
     <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-common</artifactId>
-        <version>${hadoop-version}</version>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop-version}</version>
     </dependency>
     <dependency>
-        <groupId>junit</groupId>
-        <artifactId>junit</artifactId>
-        <scope>test</scope>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
-        <groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
-        <scope>test</scope>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
-        <groupId>com.google.inject</groupId>
-        <artifactId>guice</artifactId>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
     </dependency>
+
     <dependency>
-        <groupId>org.glassfish.jersey.containers</groupId>
-        <artifactId>jersey-container-servlet</artifactId>
+      <groupId>com.sun.jersey.contribs</groupId>
+      <artifactId>jersey-multipart</artifactId>
     </dependency>
     <dependency>
-        <groupId>com.sun.jersey.contribs</groupId>
-        <artifactId>jersey-multipart</artifactId>
-        <version>1.18</version>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
     </dependency>
     <dependency>
-        <groupId>com.googlecode.json-simple</groupId>
-        <artifactId>json-simple</artifactId>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <version>${hadoop-version}</version>
+      <scope>test</scope>
     </dependency>
+
     <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-minicluster</artifactId>
-        <version>${hadoop-version}</version>
-        <scope>test</scope>
+      <groupId>com.sun.jersey.jersey-test-framework</groupId>
+      <artifactId>jersey-test-framework-core</artifactId>
+      <scope>test</scope>
     </dependency>
-
     <dependency>
-        <groupId>org.glassfish.jersey.test-framework</groupId>
-        <artifactId>jersey-test-framework-core</artifactId>
-        <version>2.6</version>
-        <scope>test</scope>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-views</artifactId>
+      <scope>provided</scope>
     </dependency>
     <dependency>
-        <groupId>org.glassfish.jersey.test-framework.providers</groupId>
-        <artifactId>jersey-test-framework-provider-grizzly2</artifactId>
-        <version>2.6</version>
-        <scope>test</scope>
+      <groupId>org.apache.ambari.contrib.views</groupId>
+      <artifactId>ambari-views-utils</artifactId>
+      <version>0.0.1-SNAPSHOT</version>
     </dependency>
     <dependency>
-        <groupId>
-        org.glassfish.jersey.test-framework.providers
-        </groupId>
-        <artifactId>
-        jersey-test-framework-provider-bundle
-        </artifactId>
-        <version>2.6</version>
-        <scope>test</scope>
-        <type>pom</type>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.2.2</version>
     </dependency>
     <dependency>
-        <groupId>org.apache.ambari</groupId>
-        <artifactId>ambari-views</artifactId>
-        <scope>provided</scope>
+      <groupId>org.glassfish.jersey.containers</groupId>
+      <artifactId>jersey-container-servlet</artifactId>
+      <scope>provided</scope>
     </dependency>
-        <dependency>
-            <groupId>com.google.code.gson</groupId>
-            <artifactId>gson</artifactId>
-            <version>2.2.2</version>
-        </dependency>
-    </dependencies>
+  </dependencies>
 
-    <properties>
-      <ambari.dir>${project.parent.parent.parent.basedir}</ambari.dir>
-      <hadoop-version>2.2.0</hadoop-version>
-      <nodejs.directory>${basedir}/target/nodejs</nodejs.directory>
-      <npm.version>1.4.3</npm.version>
-      <ui.directory>${basedir}/src/main/resources/ui</ui.directory>
-    </properties>
-    <build>
+  <properties>
+    <ambari.dir>${project.parent.parent.parent.basedir}</ambari.dir>
+    <hadoop-version>2.6.0</hadoop-version>
+    <nodejs.directory>${basedir}/target/nodejs</nodejs.directory>
+    <npm.version>1.4.3</npm.version>
+    <ui.directory>${basedir}/src/main/resources/ui</ui.directory>
+  </properties>
+  <build>
 
     <plugins>
       <plugin>
@@ -227,99 +215,99 @@
         </executions>
       </plugin>
       <plugin>
-         <groupId>org.vafer</groupId>
-         <artifactId>jdeb</artifactId>
-         <version>1.0.1</version>
-         <executions>
-             <execution>
-                 <phase>none</phase>
-                 <goals>
-                     <goal>jdeb</goal>
-                 </goals>
-             </execution>
-         </executions>
-         <configuration>
-             <skip>true</skip>
-             <submodules>false</submodules>
-         </configuration>
-     </plugin>
+        <groupId>org.vafer</groupId>
+        <artifactId>jdeb</artifactId>
+        <version>1.0.1</version>
+        <executions>
+          <execution>
+            <phase>none</phase>
+            <goals>
+              <goal>jdeb</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <skip>true</skip>
+          <submodules>false</submodules>
+        </configuration>
+      </plugin>
     </plugins>
     <resources>
-        <resource>
+      <resource>
         <directory>src/main/resources/ui/public</directory>
         <filtering>false</filtering>
-        </resource>
+      </resource>
 
-        <resource>
+      <resource>
         <directory>src/main/resources/</directory>
         <filtering>false</filtering>
         <includes>
-            <include>view.xml</include>
+          <include>view.xml</include>
         </includes>
-        </resource>
+      </resource>
 
-        <resource>
-          <targetPath>WEB-INF/lib</targetPath>
-          <filtering>false</filtering>
-          <directory>target/lib</directory>
-        </resource>
+      <resource>
+        <targetPath>WEB-INF/lib</targetPath>
+        <filtering>false</filtering>
+        <directory>target/lib</directory>
+      </resource>
     </resources>
     <pluginManagement>
-        <plugins>
+      <plugins>
         <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
         <plugin>
-            <groupId>org.eclipse.m2e</groupId>
-            <artifactId>lifecycle-mapping</artifactId>
-            <version>1.0.0</version>
-            <configuration>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
             <lifecycleMappingMetadata>
-                <pluginExecutions>
+              <pluginExecutions>
                 <pluginExecution>
-                    <pluginExecutionFilter>
+                  <pluginExecutionFilter>
                     <groupId>
-                        org.codehaus.mojo
+                      org.codehaus.mojo
                     </groupId>
                     <artifactId>
-                        exec-maven-plugin
+                      exec-maven-plugin
                     </artifactId>
                     <versionRange>
-                        [1.2.1,)
+                      [1.2.1,)
                     </versionRange>
                     <goals>
-                        <goal>exec</goal>
+                      <goal>exec</goal>
                     </goals>
-                    </pluginExecutionFilter>
-                    <action>
+                  </pluginExecutionFilter>
+                  <action>
                     <ignore></ignore>
-                    </action>
+                  </action>
                 </pluginExecution>
                 <pluginExecution>
-                    <pluginExecutionFilter>
+                  <pluginExecutionFilter>
                     <groupId>
-                        com.github.eirslett
+                      com.github.eirslett
                     </groupId>
                     <artifactId>
-                        frontend-maven-plugin
+                      frontend-maven-plugin
                     </artifactId>
                     <versionRange>
-                        [0.0.14,)
+                      [0.0.14,)
                     </versionRange>
                     <goals>
-                        <goal>
+                      <goal>
                         install-node-and-npm
-                        </goal>
-                        <goal>npm</goal>
+                      </goal>
+                      <goal>npm</goal>
                     </goals>
-                    </pluginExecutionFilter>
-                    <action>
+                  </pluginExecutionFilter>
+                  <action>
                     <ignore></ignore>
-                    </action>
+                  </action>
                 </pluginExecution>
-                </pluginExecutions>
+              </pluginExecutions>
             </lifecycleMappingMetadata>
-            </configuration>
+          </configuration>
         </plugin>
-        </plugins>
+      </plugins>
     </pluginManagement>
   </build>
   <profiles>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
index 1685450..7395f8f 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
@@ -48,6 +48,7 @@ import javax.xml.bind.annotation.XmlElement;
 import com.google.gson.Gson;
 import org.apache.ambari.view.filebrowser.utils.NotFoundFormattedException;
 import org.apache.ambari.view.filebrowser.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.ambari.view.ViewContext;

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
index ded3684..fd07da6 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
@@ -33,6 +33,8 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.filebrowser.utils.NotFoundFormattedException;
 import org.apache.ambari.view.filebrowser.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.json.simple.JSONObject;
 
 /**
@@ -139,10 +141,12 @@ public class FileOperationService extends HdfsService {
     try {
       HdfsApi api = getApi(context);
       ResponseBuilder result;
-      if (api.copy(request.src, request.dst)) {
+      try {
+        api.copy(request.src, request.dst);
+
         result = Response.ok(getApi(context).fileStatusToJSON(api
             .getFileStatus(request.dst)));
-      } else {
+      } catch (HdfsApiException e) {
         result = Response.ok(new BoolResult(false)).status(422);
       }
       return result.build();
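
The hunk above changes the contract of HdfsApi.copy(): instead of returning a boolean, it now reports failure by throwing HdfsApiException, which FileOperationService maps to an HTTP 422. A minimal caller-side sketch of the new pattern (the copy() signature is assumed from the call site above; CopyExample and copyOrReport are hypothetical names):

    import org.apache.ambari.view.utils.hdfs.HdfsApi;
    import org.apache.ambari.view.utils.hdfs.HdfsApiException;

    public class CopyExample {
      // Copies src to dst, reporting failure instead of propagating it.
      public static boolean copyOrReport(HdfsApi api, String src, String dst) {
        try {
          api.copy(src, dst);   // assumed to throw HdfsApiException on failure
          return true;
        } catch (HdfsApiException e) {
          // FileOperationService turns this case into a 422 response.
          System.err.println("Copy failed: " + e.getMessage());
          return false;
        }
      }
    }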

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
deleted file mode 100644
index b521085..0000000
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
+++ /dev/null
@@ -1,441 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.filebrowser;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.fs.permission.FsPermission;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.URI;
-import java.security.PrivilegedExceptionAction;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.hadoop.security.UserGroupInformation;
-import org.json.simple.JSONArray;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.LinkedHashMap;
-
-/**
- * Hdfs Business Delegate
- */
-public class HdfsApi {
-  protected static final Logger logger = LoggerFactory.getLogger(HdfsApi.class);
-
-  private final Configuration conf = new Configuration();
-  private final Map<String, String> params;
-
-  private FileSystem fs;
-  private UserGroupInformation ugi;
-
-  /**
-   * Constructor
-   * @param defaultFs hdfs uri
-   * @param params map of parameters
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public HdfsApi(final String defaultFs, String username, Map<String, String> params) throws IOException,
-      InterruptedException {
-    logger.info("Files View HdfsApi is connecting to '%s'", defaultFs);
-    this.params = params;
-    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
-    conf.set("fs.webhdfs.impl", "org.apache.hadoop.hdfs.web.WebHdfsFileSystem");
-    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
-
-    ugi = UserGroupInformation.createProxyUser(username, getProxyUser());
-
-    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
-      public FileSystem run() throws IOException {
-        return FileSystem.get(URI.create(defaultFs), conf);
-      }
-    });
-  }
-
-  private UserGroupInformation getProxyUser() throws IOException {
-    UserGroupInformation proxyuser;
-    if (params.containsKey("proxyuser")) {
-      proxyuser = UserGroupInformation.createRemoteUser(params.get("proxyuser"));
-    } else {
-      proxyuser = UserGroupInformation.getCurrentUser();
-    }
-
-    proxyuser.setAuthenticationMethod(getAuthenticationMethod());
-    return proxyuser;
-  }
-
-  private UserGroupInformation.AuthenticationMethod getAuthenticationMethod() {
-    UserGroupInformation.AuthenticationMethod authMethod;
-    if (params.containsKey("auth")) {
-      authMethod = UserGroupInformation.AuthenticationMethod.valueOf(params.get("auth"));
-    } else {
-      authMethod = UserGroupInformation.AuthenticationMethod.SIMPLE;
-    }
-    return authMethod;
-  }
-
-  /**
-   * List dir operation
-   * @param path path
-   * @return array of FileStatus objects
-   * @throws FileNotFoundException
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public FileStatus[] listdir(final String path) throws FileNotFoundException,
-      IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
-      public FileStatus[] run() throws FileNotFoundException, Exception {
-        return fs.listStatus(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Get file status
-   * @param path path
-   * @return file status
-   * @throws IOException
-   * @throws FileNotFoundException
-   * @throws InterruptedException
-   */
-  public FileStatus getFileStatus(final String path) throws IOException,
-      FileNotFoundException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
-      public FileStatus run() throws FileNotFoundException, IOException {
-        return fs.getFileStatus(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Make directory
-   * @param path path
-   * @return success
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean mkdir(final String path) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.mkdirs(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Rename
-   * @param src source path
-   * @param dst destination path
-   * @return success
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean rename(final String src, final String dst) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.rename(new Path(src), new Path(dst));
-      }
-    });
-  }
-
-  /**
-   * Check is trash enabled
-   * @return true if trash is enabled
-   * @throws Exception
-   */
-  public boolean trashEnabled() throws Exception {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws IOException {
-        Trash tr = new Trash(fs, conf);
-        return tr.isEnabled();
-      }
-    });
-  }
-
-  /**
-   * Home directory
-   * @return home directory
-   * @throws Exception
-   */
-  public Path getHomeDir() throws Exception {
-    return ugi.doAs(new PrivilegedExceptionAction<Path>() {
-      public Path run() throws IOException {
-        return fs.getHomeDirectory();
-      }
-    });
-  }
-
-  /**
-   * Trash directory
-   * @return trash directory
-   * @throws Exception
-   */
-  public Path getTrashDir() throws Exception {
-    return ugi.doAs(new PrivilegedExceptionAction<Path>() {
-      public Path run() throws IOException {
-        TrashPolicy trashPolicy = TrashPolicy.getInstance(conf, fs,
-            fs.getHomeDirectory());
-        return trashPolicy.getCurrentTrashDir().getParent();
-      }
-    });
-  }
- 
-   /**
-    * Trash directory path.
-    *
-    * @return trash directory path
-    * @throws Exception
-    */
-  public String getTrashDirPath() throws Exception {
-    Path trashDir = getTrashDir();
-    
-    return  trashDir.toUri().getRawPath();
-  }
-
-   /**
-    * Trash directory path.
-    *
-    * @param    filePath        the path to the file
-    * @return trash directory path for the file
-    * @throws Exception
-    */
-  public String getTrashDirPath(String filePath) throws Exception {
-      String trashDirPath = getTrashDirPath();
-
-      Path path = new Path(filePath);
-      trashDirPath = trashDirPath+"/"+path.getName();
-      
-    return  trashDirPath;
-  }
-      
-  /**
-   * Empty trash
-   * @return
-   * @throws Exception
-   */
-  public Void emptyTrash() throws Exception {
-    return ugi.doAs(new PrivilegedExceptionAction<Void>() {
-      public Void run() throws IOException {
-        Trash tr = new Trash(fs, conf);
-        tr.expunge();
-        return null;
-      }
-    });
-  }
-
-  /**
-   * Move to trash
-   * @param path path
-   * @return success
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean moveToTrash(final String path) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return Trash.moveToAppropriateTrash(fs, new Path(path), conf);
-      }
-    });
-  }
-
-  /**
-   * Delete
-   * @param path path
-   * @param recursive delete recursive
-   * @return success
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean delete(final String path, final boolean recursive)
-      throws IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return fs.delete(new Path(path), recursive);
-      }
-    });
-  }
-
-  /**
-   * Create file
-   * @param path path
-   * @param overwrite overwrite existent file
-   * @return output stream
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public FSDataOutputStream create(final String path, final boolean overwrite)
-      throws IOException, InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
-      public FSDataOutputStream run() throws Exception {
-        return fs.create(new Path(path), overwrite);
-      }
-    });
-  }
-
-  /**
-   * Open file
-   * @param path path
-   * @return input stream
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public FSDataInputStream open(final String path) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
-      public FSDataInputStream run() throws Exception {
-        return fs.open(new Path(path));
-      }
-    });
-  }
-
-  /**
-   * Change permissions
-   * @param path path
-   * @param permissions permissions in format rwxrwxrwx
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean chmod(final String path, final String permissions) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        try {
-          fs.setPermission(new Path(path), FsPermission.valueOf(permissions));
-        } catch (Exception ex) {
-          return false;
-        }
-        return true;
-      }
-    });
-  }
-
-  /**
-   * Copy file
-   * @param src source path
-   * @param dest destination path
-   * @return success
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public boolean copy(final String src, final String dest) throws IOException,
-      InterruptedException {
-    return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-      public Boolean run() throws Exception {
-        return FileUtil
-            .copy(fs, new Path(src), fs, new Path(dest), false, conf);
-      }
-    });
-  }
-
-  /**
-   * Converts a Hadoop permission into a Unix permission symbolic representation
-   * (i.e. -rwxr--r--) or default if the permission is NULL.
-   *
-   * @param p
-   *          Hadoop permission.
-   * @return the Unix permission symbolic representation or default if the
-   *         permission is NULL.
-   */
-  private static String permissionToString(FsPermission p) {
-    return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
-        + p.getGroupAction().SYMBOL + p.getOtherAction().SYMBOL;
-  }
-
-  /**
-   * Converts a Hadoop <code>FileStatus</code> object into a JSON array object.
-   * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
-   * specified URL.
-   * <p/>
-   *
-   * @param status
-   *          Hadoop file status.
-   * @return The JSON representation of the file status.
-   */
-
-  public Map<String, Object> fileStatusToJSON(FileStatus status) {
-    Map<String, Object> json = new LinkedHashMap<String, Object>();
-    json.put("path", Path.getPathWithoutSchemeAndAuthority(status.getPath())
-        .toString());
-    json.put("replication", status.getReplication());
-    json.put("isDirectory", status.isDirectory());
-    json.put("len", status.getLen());
-    json.put("owner", status.getOwner());
-    json.put("group", status.getGroup());
-    json.put("permission", permissionToString(status.getPermission()));
-    json.put("accessTime", status.getAccessTime());
-    json.put("modificationTime", status.getModificationTime());
-    json.put("blockSize", status.getBlockSize());
-    json.put("replication", status.getReplication());
-    json.put("readAccess", checkAccessPermissions(status, FsAction.READ, ugi));
-    json.put("writeAccess", checkAccessPermissions(status, FsAction.WRITE, ugi));
-    json.put("executeAccess", checkAccessPermissions(status, FsAction.EXECUTE, ugi));
-    return json;
-  }
-
-  /**
-   * Converts a Hadoop <code>FileStatus</code> array into a JSON array object.
-   * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
-   * specified URL.
-   * <p/>
-   *
-   * @param status
-   *          Hadoop file status array.
-   * @return The JSON representation of the file status array.
-   */
-  @SuppressWarnings("unchecked")
-  public JSONArray fileStatusToJSON(FileStatus[] status) {
-    JSONArray json = new JSONArray();
-    if (status != null) {
-      for (FileStatus s : status) {
-        json.add(fileStatusToJSON(s));
-      }
-    }
-    return json;
-  }
-
-  public static boolean checkAccessPermissions(FileStatus stat, FsAction mode, UserGroupInformation ugi) {
-    FsPermission perm = stat.getPermission();
-    String user = ugi.getShortUserName();
-    List<String> groups = Arrays.asList(ugi.getGroupNames());
-    if (user.equals(stat.getOwner())) {
-      if (perm.getUserAction().implies(mode)) {
-        return true;
-      }
-    } else if (groups.contains(stat.getGroup())) {
-      if (perm.getGroupAction().implies(mode)) {
-        return true;
-      }
-    } else {
-      if (perm.getOtherAction().implies(mode)) {
-        return true;
-      }
-    }
-    return false;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
index c66bd8f..073f13a 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
@@ -22,6 +22,8 @@ import javax.xml.bind.annotation.XmlRootElement;
 
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.filebrowser.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -67,13 +69,8 @@ public abstract class HdfsService {
    */
   public HdfsApi getApi(ViewContext context) {
     if (_api == null) {
-//      Thread.currentThread().setContextClassLoader(null);
-      String defaultFs = context.getProperties().get("webhdfs.url");
-
-      defaultFs = normalizeFsUrl(defaultFs);
-
       try {
-        _api = new HdfsApi(defaultFs, getDoAsUsername(context), getHdfsAuthParams(context));
+        _api = HdfsUtil.connectToHDFSApi(context);
       } catch (Exception ex) {
         throw new ServiceFormattedException("HdfsApi connection failed. Check \"webhdfs.url\" property", ex);
       }
@@ -81,17 +78,6 @@ public abstract class HdfsService {
     return _api;
   }
 
-  protected static String normalizeFsUrl(String defaultFs) {
-    //TODO: Don't add port if HA is enabled
-    if (!defaultFs.matches("^[^:]+://.*$"))
-      defaultFs = "webhdfs://" + defaultFs;
-
-    if (!defaultFs.matches("^.*:\\d+$"))
-      defaultFs = defaultFs + ":50070";
-
-    return defaultFs;
-  }
-
   private static Map<String, String> getHdfsAuthParams(ViewContext context) {
     String auth = context.getProperties().get("webhdfs.auth");
     Map<String, String> params = new HashMap<String, String>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
index 695ca38..adc99a4 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
@@ -30,6 +30,7 @@ import javax.ws.rs.core.Response;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.filebrowser.utils.NotFoundFormattedException;
 import org.apache.ambari.view.filebrowser.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 
 /**
  * Help service

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/PropertyValidator.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/PropertyValidator.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/PropertyValidator.java
index abc4f2b..41b9b05 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/PropertyValidator.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/PropertyValidator.java
@@ -40,10 +40,12 @@ public class PropertyValidator implements Validator {
   public ValidationResult validateProperty(String property, ViewInstanceDefinition viewInstanceDefinition, ValidationContext validationContext) {
     if (property.equals(WEBHDFS_URL)) {
       String webhdfsUrl = viewInstanceDefinition.getPropertyMap().get(WEBHDFS_URL);
-      try {
-        new URI(webhdfsUrl);
-      } catch (URISyntaxException e) {
-        return new InvalidPropertyValidationResult(false, "Must be valid URL");
+      if (webhdfsUrl != null) {
+        try {
+          new URI(webhdfsUrl);
+        } catch (URISyntaxException e) {
+          return new InvalidPropertyValidationResult(false, "Must be valid URL");
+        }
       }
     }
     return ValidationResult.SUCCESS;

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
index 051bdfb..0324796 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
@@ -29,6 +29,7 @@ import javax.ws.rs.core.Response;
 
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.filebrowser.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.hadoop.fs.FSDataOutputStream;
 
 import com.sun.jersey.core.header.FormDataContentDisposition;

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/view.xml b/contrib/views/files/src/main/resources/view.xml
index dbd0643..1c8991e 100644
--- a/contrib/views/files/src/main/resources/view.xml
+++ b/contrib/views/files/src/main/resources/view.xml
@@ -17,7 +17,7 @@
 <view>
     <name>FILES</name>
     <label>Files</label>
-    <version>0.1.0</version>
+    <version>0.2.0</version>
 
     <min-ambari-version>2.0.*</min-ambari-version>
 
@@ -25,14 +25,69 @@
 
     <parameter>
         <name>webhdfs.url</name>
-        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
+        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address
+            property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
         <label>WebHDFS FileSystem URI</label>
-        <placeholder>webhdfs://namenode:50070</placeholder>
-        <default-value>webhdfs://localhost:50070</default-value>
         <required>true</required>
-        <cluster-config>hdfs-site/dfs.namenode.http-address</cluster-config>
+        <cluster-config>core-site/fs.defaultFS</cluster-config>
     </parameter>
     <parameter>
+        <name>webhdfs.nameservices</name>
+        <description>Comma-separated list of nameservices. Value of hdfs-site/dfs.nameservices property</description>
+        <label>Logical name of the NameNode cluster</label>
+        <required>false</required>
+        <cluster-config>hdfs-site/dfs.nameservices</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenodes.list</name>
+        <description>Comma-separated list of namenodes for a given nameservice.
+            Value of hdfs-site/dfs.ha.namenodes.[nameservice] property</description>
+        <label>List of NameNodes</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn1</name>
+        <description>RPC address for first name node.
+            Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn2</name>
+        <description>RPC address for second name node.
+            Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn1</name>
+        <description>WebHDFS address for first name node.
+            Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn2</name>
+        <description>WebHDFS address for second name node.
+            Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.client.failover.proxy.provider</name>
+        <description>The Java class that HDFS clients use to contact the Active NameNode.
+            Value of hdfs-site/dfs.client.failover.proxy.provider.[nameservice] property</description>
+        <label>Failover Proxy Provider</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+
+    <parameter>
         <name>webhdfs.username</name>
         <description>doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
         <label>WebHDFS Username</label>
@@ -43,7 +98,6 @@
         <name>webhdfs.auth</name>
         <description>Semicolon-separated authentication configs.</description>
         <placeholder>auth=SIMPLE</placeholder>
-        <default-value>auth=SIMPLE</default-value>
         <label>WebHDFS Authorization</label>
         <required>false</required>
     </parameter>
@@ -52,14 +106,4 @@
         <name>files</name>
         <service-class>org.apache.ambari.view.filebrowser.FileBrowserService</service-class>
     </resource>
-
-    <auto-instance>
-        <name>AUTO_INSTANCE</name>
-        <label>Auto Create instance for the Files view</label>
-        <description>This view instance is auto created when the HDFS service is added to a cluster.</description>
-        <stack-id>HDP-2.*</stack-id>
-        <services>
-            <service>HDFS</service>
-        </services>
-    </auto-instance>
 </view>
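
For illustration, a hypothetical set of instance-property values for an HA cluster (the hostnames and nameservice name are invented; each property mirrors the hdfs-site key named in its description above, and ConfiguredFailoverProxyProvider is the stock Hadoop failover provider):

    webhdfs.url                            = webhdfs://mycluster
    webhdfs.nameservices                   = mycluster
    webhdfs.ha.namenodes.list              = nn1,nn2
    webhdfs.ha.namenode.rpc-address.nn1    = master1.example.com:8020
    webhdfs.ha.namenode.rpc-address.nn2    = master2.example.com:8020
    webhdfs.ha.namenode.http-address.nn1   = master1.example.com:50070
    webhdfs.ha.namenode.http-address.nn2   = master2.example.com:50070
    webhdfs.client.failover.proxy.provider = org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider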

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/HdfsServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/HdfsServiceTest.java b/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/HdfsServiceTest.java
deleted file mode 100644
index cc02a3f..0000000
--- a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/HdfsServiceTest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.filebrowser;
-
-import org.junit.Test;
-
-import static org.junit.Assert.*;
-
-public class HdfsServiceTest {
-  @Test
-  public void testNormalizeFsUrlWithoutProtocol() throws Exception {
-    String normalized = HdfsService.normalizeFsUrl("namenode.example.com:50070");
-    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
-  }
-
-  @Test
-  public void testNormalizeFsUrlWithoutPort() throws Exception {
-    String normalized = HdfsService.normalizeFsUrl("webhdfs://namenode.example.com");
-    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
-  }
-
-  @Test
-  public void testNormalizeFsUrlOnlyHostname() throws Exception {
-    String normalized = HdfsService.normalizeFsUrl("namenode.example.com");
-    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
-  }
-
-  @Test
-  public void testNormalizeFsUrlFixNoCorrectUrl() throws Exception {
-    String normalized = HdfsService.normalizeFsUrl("webhdfs://namenode.example.com:50070");
-    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pom.xml b/contrib/views/pom.xml
index 6eb6aeb..9fa698b 100644
--- a/contrib/views/pom.xml
+++ b/contrib/views/pom.xml
@@ -34,11 +34,13 @@
   </properties>
   <modules>
     <module>files</module>
+    <module>jobs</module>
     <module>pig</module>
     <module>slider</module>
     <module>capacity-scheduler</module>
     <module>hive</module>
     <module>tez</module>
+    <module>utils</module>
   </modules>
   <build>
     <pluginManagement>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/utils/pom.xml b/contrib/views/utils/pom.xml
new file mode 100644
index 0000000..d56a759
--- /dev/null
+++ b/contrib/views/utils/pom.xml
@@ -0,0 +1,122 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.ambari.contrib.views</groupId>
+  <artifactId>ambari-views-utils</artifactId>
+  <version>0.0.1-SNAPSHOT</version>
+  <name>Ambari View Utils</name>
+
+  <parent>
+    <groupId>org.apache.ambari.contrib.views</groupId>
+    <artifactId>ambari-contrib-views</artifactId>
+    <version>2.0.0-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.containers</groupId>
+      <artifactId>jersey-container-servlet</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey.contribs</groupId>
+      <artifactId>jersey-multipart</artifactId>
+      <version>1.18</version>
+    </dependency>
+    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <version>${hadoop-version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.glassfish.jersey.test-framework</groupId>
+      <artifactId>jersey-test-framework-core</artifactId>
+      <version>2.6</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.test-framework.providers</groupId>
+      <artifactId>jersey-test-framework-provider-grizzly2</artifactId>
+      <version>2.6</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-collections4</artifactId>
+      <version>4.0</version>
+    </dependency>
+    <dependency>
+      <groupId>
+        org.glassfish.jersey.test-framework.providers
+      </groupId>
+      <artifactId>
+        jersey-test-framework-provider-bundle
+      </artifactId>
+      <version>2.6</version>
+      <scope>test</scope>
+      <type>pom</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-views</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.2.2</version>
+    </dependency>
+  </dependencies>
+
+  <properties>
+    <ambari.dir>${project.parent.parent.parent.basedir}</ambari.dir>
+    <hadoop-version>2.2.0</hadoop-version>
+  </properties>
+  <build>
+  </build>
+</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/readme.md
----------------------------------------------------------------------
diff --git a/contrib/views/utils/readme.md b/contrib/views/utils/readme.md
new file mode 100644
index 0000000..9e465d7
--- /dev/null
+++ b/contrib/views/utils/readme.md
@@ -0,0 +1,55 @@
+<!---
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+Utils
+============
+
+Description
+-----
+This module provides common utilities for views.
+
+Requirements
+-----
+
+- Ambari 2.1.0 or later
+
+HDFS Utility
+-----
+
+The HdfsApi class is a business delegate for the HDFS client and supports proxyuser configuration.
+You can create an HdfsApi instance from your ViewContext:
+
+    HdfsApi api = HdfsUtil.connectToHDFSApi(viewContext);
+
+It reads the instance properties and creates an HdfsApi configured for the specific cluster. NameNode HA is supported.
+
+AmbariApi
+-----
+
+AmbariApi provides methods to get Ambari configurations and cluster topology.
+
+Cluster association functionality:
+
+    AmbariApi api = new AmbariApi(viewContext);
+    Cluster cluster = api.getCluster();
+
+It can work with a local or a remote cluster, based on the Ambari URL, username, and password
+instance properties in the ViewContext. To determine whether an associated cluster, local or remote, exists:
+
+    boolean isAssociated = api.isClusterAssociated();
+
+It also provides an API to get the cluster topology:
+
+    List<String> nnHosts = api.getHostsWithComponent("NAMENODE");
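
A slightly fuller usage sketch tying the readme's two entry points together (connectToHDFSApi, AmbariApi, isClusterAssociated, getHostsWithComponent, and fileStatusToJSON all appear in this commit; listdir() is assumed to keep the signature of the filebrowser HdfsApi it replaces):

    import java.util.List;

    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.utils.ambari.AmbariApi;
    import org.apache.ambari.view.utils.hdfs.HdfsApi;
    import org.apache.ambari.view.utils.hdfs.HdfsUtil;

    public class UtilsUsageExample {
      public static void explore(ViewContext viewContext) throws Exception {
        AmbariApi ambari = new AmbariApi(viewContext);
        if (ambari.isClusterAssociated()) {
          // Topology lookup works against a local or a remote cluster,
          // depending on the instance properties.
          List<String> nnHosts = ambari.getHostsWithComponent("NAMENODE");
          System.out.println("NameNodes: " + nnHosts);
        }

        // Reads the instance properties (including the HA ones) and
        // returns a client configured for the associated cluster.
        HdfsApi hdfs = HdfsUtil.connectToHDFSApi(viewContext);
        System.out.println(hdfs.fileStatusToJSON(hdfs.listdir("/")));
      }
    }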

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/AmbariApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/AmbariApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/AmbariApi.java
new file mode 100644
index 0000000..88e5f48
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/AmbariApi.java
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.ambari;
+
+import org.apache.ambari.view.AmbariStreamProvider;
+import org.apache.ambari.view.URLStreamProvider;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.cluster.Cluster;
+import org.apache.commons.io.IOUtils;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Provides an API to Ambari. Supports both local and remote cluster association.
+ * Also provides an API to get the cluster topology (determine which node hosts a
+ * specific component) on both local and remote clusters.
+ */
+public class AmbariApi {
+  public static final String AMBARI_SERVER_URL_INSTANCE_PROPERTY = "ambari.server.url";
+  public static final String AMBARI_SERVER_USERNAME_INSTANCE_PROPERTY = "ambari.server.username";
+  public static final String AMBARI_SERVER_PASSWORD_INSTANCE_PROPERTY = "ambari.server.password";
+
+  private Cluster cluster;
+  private ViewContext context;
+  private String remoteUrl;
+  private String remoteUsername;
+  private String remotePassword;
+
+  /**
+   * Constructor for Ambari API based on ViewContext
+   * @param context View Context
+   */
+  public AmbariApi(ViewContext context) {
+    this.context = context;
+
+    remoteUrl = context.getProperties().get(AMBARI_SERVER_URL_INSTANCE_PROPERTY);
+    remoteUsername = context.getProperties().get(AMBARI_SERVER_USERNAME_INSTANCE_PROPERTY);
+    remotePassword = context.getProperties().get(AMBARI_SERVER_PASSWORD_INSTANCE_PROPERTY);
+  }
+
+  /**
+   * Provides ability to get cluster topology
+   * @param requestComponent name of component
+   * @return list of hostnames with component
+   * @throws AmbariApiException
+   */
+  public List<String> getHostsWithComponent(String requestComponent) throws AmbariApiException {
+    String method = "hosts?fields=Hosts/public_host_name,host_components/HostRoles/component_name";
+    String response = readFromAmbari(method);
+
+    List<String> foundHosts = new ArrayList<String>();
+
+    JSONObject jsonObject = (JSONObject) JSONValue.parse(response);
+    JSONArray hosts = (JSONArray) jsonObject.get("items");
+    for (Object host : hosts) {
+      JSONObject hostJson = (JSONObject) host;
+      JSONArray hostComponents = (JSONArray) hostJson.get("host_components");
+      for (Object component : hostComponents) {
+        JSONObject componentJson = (JSONObject) component;
+        JSONObject hostRoles = (JSONObject) componentJson.get("HostRoles");
+        String componentName = (String) hostRoles.get("component_name");
+        if (componentName.equals(requestComponent)) {
+          foundHosts.add((String) hostRoles.get("host_name"));
+        }
+      }
+    }
+    return foundHosts;
+  }
+
+  /**
+   * Request to Ambari REST API. Supports both local and remote cluster
+   * @param method REST API path, e.g. /api/v1/clusters/mycluster?...
+   * @return response
+   * @throws AmbariApiException IO error or not associated with cluster
+   */
+  public String readFromAmbari(String method) throws AmbariApiException {
+    String response;
+
+    try {
+      InputStream inputStream;
+
+      if (isLocalCluster()) {
+        AmbariStreamProvider ambariStreamProvider = context.getAmbariStreamProvider();
+        String url = String.format("/api/v1/clusters/%s/%s", getCluster().getName(), method);
+        inputStream = ambariStreamProvider.readFrom(url, "GET", (String) null, null, true);
+
+      } else if (isRemoteCluster()) {
+        URLStreamProvider urlStreamProvider = getUrlStreamProviderBasicAuth();
+        String url = String.format("%s/%s", remoteUrl, method);
+        inputStream = urlStreamProvider.readFrom(url, "GET", (String) null, null);
+
+      } else {
+        throw new NoClusterAssociatedException(
+            "RA030 View is not associated with any cluster. No way to request Ambari.");
+      }
+
+      response = IOUtils.toString(inputStream);
+    } catch (IOException e) {
+      throw new AmbariApiException("RA040 I/O error while requesting Ambari", e);
+    }
+    return response;
+  }
+
+  /**
+   * Checks whether the view is associated with a cluster, local or remote.
+   * @return true if associated
+   */
+  public boolean isClusterAssociated() {
+    try {
+      getCluster();
+      return true;
+    } catch (NoClusterAssociatedException e) {
+      return false;
+    }
+  }
+
+  /**
+   * Gets the cluster object that provides access to Ambari configuration.
+   * @return the local Cluster if locally associated, otherwise a RemoteCluster
+   * @throws NoClusterAssociatedException if no cluster is associated
+   */
+  public Cluster getCluster() throws NoClusterAssociatedException {
+    if (cluster == null) {
+      if (isLocalCluster()) {
+        cluster = context.getCluster();
+
+      } else if (isRemoteCluster()) {
+        cluster = getRemoteCluster();
+
+      } else {
+        throw new NoClusterAssociatedException(
+            "RA050 View is not associated with any cluster. No way to request Ambari.");
+      }
+    }
+    return cluster;
+  }
+
+  /**
+   * Checks whether the view is associated with a local cluster
+   * @return true if associated
+   */
+  public boolean isLocalCluster() {
+    return context.getCluster() != null;
+  }
+
+  /**
+   * Checks whether the view is associated with a remote cluster
+   * @return true if associated
+   */
+  public boolean isRemoteCluster() {
+    return remoteUrl != null && !remoteUrl.isEmpty();
+  }
+
+  /**
+   * Builds a RemoteCluster instance from the view instance properties.
+   * @return RemoteCluster instance, or null if not remotely associated
+   */
+  public RemoteCluster getRemoteCluster() {
+    if (!isRemoteCluster())
+      return null;
+
+    URLStreamProvider urlStreamProviderBasicAuth = getUrlStreamProviderBasicAuth();
+    return new RemoteCluster(remoteUrl, urlStreamProviderBasicAuth);
+  }
+
+  /**
+   * Builds a URLStreamProvider with Basic Authentication for the remote cluster.
+   * @return URLStreamProvider wrapping the view's provider with the configured credentials
+   */
+  public URLStreamProvider getUrlStreamProviderBasicAuth() {
+    if (remoteUsername == null || remoteUsername.isEmpty() ||
+        remotePassword == null || remotePassword.isEmpty()) {
+      throw new AmbariApiException("RA020 Remote Ambari username and password are not filled");
+    }
+
+    URLStreamProvider urlStreamProvider = context.getURLStreamProvider();
+
+    return new URLStreamProviderBasicAuth(urlStreamProvider, remoteUsername, remotePassword);
+  }
+}
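
A minimal usage sketch for the new AmbariApi (the example class and the
NAMENODE component name are illustrative, not part of this commit):

    import java.util.Collections;
    import java.util.List;
    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.utils.ambari.AmbariApi;

    public class TopologyExample {
      /** Hosts running the NAMENODE component, or an empty list. */
      public static List<String> findNameNodes(ViewContext viewContext) {
        AmbariApi ambariApi = new AmbariApi(viewContext);
        if (!ambariApi.isClusterAssociated()) {
          return Collections.emptyList();
        }
        return ambariApi.getHostsWithComponent("NAMENODE");
      }
    }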

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/AmbariApiException.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/AmbariApiException.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/AmbariApiException.java
new file mode 100644
index 0000000..4ecc515
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/AmbariApiException.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.ambari;
+
+/**
+ * Exception thrown while working with the Ambari API
+ */
+public class AmbariApiException extends RuntimeException {
+  public AmbariApiException(String message) {
+    super(message);
+  }
+
+  public AmbariApiException(String message, Throwable cause) {
+    super(message, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/NoClusterAssociatedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/NoClusterAssociatedException.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/NoClusterAssociatedException.java
new file mode 100644
index 0000000..be1efd3
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/NoClusterAssociatedException.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.ambari;
+
+public class NoClusterAssociatedException extends AmbariApiException {
+  public NoClusterAssociatedException(String message) {
+    super(message);
+  }
+}
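
Being unchecked, this exception lets callers degrade gracefully when no cluster
is associated; a sketch of the pattern (the SIMPLE fallback is illustrative,
mirroring AuthConfigurationBuilder further below):

    // assumes an AmbariApi built from the ViewContext
    private String authenticationType(AmbariApi ambariApi) {
      try {
        return ambariApi.getCluster().getConfigurationValue(
            "core-site", "hadoop.security.authentication");
      } catch (NoClusterAssociatedException e) {
        return "SIMPLE";  // fallback when no cluster is available
      }
    }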

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/RemoteCluster.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/RemoteCluster.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/RemoteCluster.java
new file mode 100644
index 0000000..abc71ab
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/RemoteCluster.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.ambari;
+
+import org.apache.ambari.view.URLStreamProvider;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.cluster.Cluster;
+import org.apache.commons.collections4.map.PassiveExpiringMap;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.input.NullInputStream;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.Collections;
+import java.util.Map;
+
+/**
+ * Provides the same interface as a local Cluster, but retrieves
+ * configuration values through the Ambari REST API.
+ */
+public class RemoteCluster implements Cluster {
+  protected String name;
+  protected String baseUrl;
+  protected URLStreamProvider urlStreamProvider;
+  protected Map<String, JSONObject> configurationCache;
+
+  /**
+   * Constructor for RemoteCluster
+   * @param ambariClusterUrl Ambari Server Cluster REST API URL (for example: http://ambari.server:8080/api/v1/clusters/c1)
+   * @param urlStreamProvider stream provider with authorization support
+   */
+  public RemoteCluster(String ambariClusterUrl, URLStreamProvider urlStreamProvider) {
+    this.baseUrl = ambariClusterUrl;
+    this.urlStreamProvider = urlStreamProvider;
+
+    String[] parts = ambariClusterUrl.split("/");
+    this.name = parts[parts.length-1];
+    PassiveExpiringMap<String, JSONObject> configurations = new PassiveExpiringMap<String, JSONObject>(10000L);  // keep cache for 10 seconds
+    configurationCache = Collections.synchronizedMap(configurations);
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public String getConfigurationValue(String type, String key) {
+    JSONObject config;
+    try {
+      String desiredTag = getDesiredConfig(type);
+      config = readFromUrlJSON(String.format("%s/configurations?(type=%s&tag=%s)", baseUrl, type, desiredTag));
+    } catch (IOException e) {
+      throw new AmbariApiException("RA010 Can't retrieve configuration from Remote Ambari", e);
+    }
+
+    JSONObject items = (JSONObject) ((JSONArray) config.get("items")).get(0);
+    JSONObject properties = (JSONObject) items.get("properties");
+    return (String) properties.get(key);
+  }
+
+  private String getDesiredConfig(String type) throws IOException {
+    JSONObject desiredConfigResponse = readFromUrlJSON(
+        String.format("%s?fields=services/ServiceInfo,hosts,Clusters", baseUrl));
+    JSONObject clusters = (JSONObject) (desiredConfigResponse.get("Clusters"));
+    JSONObject desiredConfig = (JSONObject) (clusters.get("desired_configs"));
+    JSONObject desiredConfigForType = (JSONObject) desiredConfig.get(type);
+
+    return (String) desiredConfigForType.get("tag");
+  }
+
+  private JSONObject readFromUrlJSON(String url) throws IOException {
+    JSONObject jsonObject = configurationCache.get(url);
+    if (jsonObject == null) {
+      InputStream inputStream = urlStreamProvider.readFrom(url, "GET", (String)null, null);
+      String response = IOUtils.toString(inputStream);
+      jsonObject = (JSONObject) JSONValue.parse(response);
+
+      configurationCache.put(url, jsonObject);
+    }
+    return jsonObject;
+  }
+}
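
A sketch of pairing RemoteCluster with URLStreamProviderBasicAuth (the URL and
credentials are placeholders); repeated lookups within 10 seconds are served
from the PassiveExpiringMap cache:

    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.cluster.Cluster;
    import org.apache.ambari.view.utils.ambari.RemoteCluster;
    import org.apache.ambari.view.utils.ambari.URLStreamProviderBasicAuth;

    public class RemoteClusterExample {
      public static String readAuthType(ViewContext viewContext) {
        Cluster remote = new RemoteCluster(
            "http://ambari.server:8080/api/v1/clusters/c1",  // placeholder URL
            new URLStreamProviderBasicAuth(
                viewContext.getURLStreamProvider(), "admin", "password"));
        return remote.getConfigurationValue(
            "core-site", "hadoop.security.authentication");
      }
    }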

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/URLStreamProviderBasicAuth.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/URLStreamProviderBasicAuth.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/URLStreamProviderBasicAuth.java
new file mode 100644
index 0000000..87a4acb
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/ambari/URLStreamProviderBasicAuth.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.ambari;
+
+import org.apache.ambari.view.URLStreamProvider;
+import org.apache.commons.codec.binary.Base64;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Wrapper for URLStreamProvider that adds a Basic Authentication header to every request
+ */
+public class URLStreamProviderBasicAuth implements URLStreamProvider {
+  private URLStreamProvider urlStreamProvider;
+  private String username;
+  private String password;
+
+  public URLStreamProviderBasicAuth(URLStreamProvider urlStreamProvider, String username, String password) {
+    this.urlStreamProvider = urlStreamProvider;
+    this.username = username;
+    this.password = password;
+  }
+
+  @Override
+  public InputStream readFrom(String url, String method, String data, Map<String, String> headers) throws IOException {
+    return urlStreamProvider.readFrom(url, method, data, addAuthHeaders(headers));
+  }
+
+  @Override
+  public InputStream readFrom(String url, String method, InputStream data, Map<String, String> headers) throws IOException {
+    return urlStreamProvider.readFrom(url, method, data, addAuthHeaders(headers));
+  }
+
+  @Override
+  public InputStream readAs(String url, String method, String data, Map<String, String> headers, String doAs) throws IOException {
+    return urlStreamProvider.readAs(url, method, data, addAuthHeaders(headers), doAs);
+  }
+
+  @Override
+  public InputStream readAs(String url, String method, InputStream data, Map<String, String> headers, String doAs) throws IOException {
+    return urlStreamProvider.readAs(url, method, data, addAuthHeaders(headers), doAs);
+  }
+
+  @Override
+  public InputStream readAsCurrent(String url, String method, String data, Map<String, String> headers) throws IOException {
+    return urlStreamProvider.readAsCurrent(url, method, data, addAuthHeaders(headers));
+  }
+
+  @Override
+  public InputStream readAsCurrent(String url, String method, InputStream data, Map<String, String> headers) throws IOException {
+    return urlStreamProvider.readAsCurrent(url, method, data, addAuthHeaders(headers));
+  }
+
+  private HashMap<String, String> addAuthHeaders(Map<String, String> customHeaders) {
+    HashMap<String, String> newHeaders = new HashMap<String, String>();
+    if (customHeaders != null)
+      newHeaders.putAll(customHeaders);
+
+    String authString = username + ":" + password;
+    byte[] authEncBytes = Base64.encodeBase64(authString.getBytes());
+    String authStringEnc = new String(authEncBytes);
+
+    newHeaders.put("Authorization", "Basic " + authStringEnc);
+    newHeaders.put("X-Requested-By", "views");
+    return newHeaders;
+  }
+}
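
The wrapper leaves caller-supplied headers intact and adds two of its own; a
sketch of an authenticated read (credentials and URL are placeholders):

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.utils.ambari.URLStreamProviderBasicAuth;

    public class BasicAuthExample {
      public static InputStream read(ViewContext viewContext) throws IOException {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Accept", "application/json");
        URLStreamProviderBasicAuth provider = new URLStreamProviderBasicAuth(
            viewContext.getURLStreamProvider(), "admin", "admin");
        // the request carries the Accept header above plus:
        //   Authorization: Basic YWRtaW46YWRtaW4=   (Base64 of "admin:admin")
        //   X-Requested-By: views
        return provider.readFrom("http://ambari.server:8080/api/v1/clusters/c1",
            "GET", (String) null, headers);
      }
    }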

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/AuthConfigurationBuilder.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/AuthConfigurationBuilder.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/AuthConfigurationBuilder.java
new file mode 100644
index 0000000..c8ca6cd
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/AuthConfigurationBuilder.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.utils.ambari.AmbariApi;
+import org.apache.ambari.view.utils.ambari.NoClusterAssociatedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Builds the HDFS authentication parameters based on the ViewContext.
+ * Currently only SIMPLE authentication is supported; KERBEROS is not, because
+ * the proxyuser can be arbitrary and cannot be determined from configuration.
+ */
+public class AuthConfigurationBuilder {
+
+  protected static final Logger LOG = LoggerFactory.getLogger(AuthConfigurationBuilder.class);
+  private Map<String, String> params = new HashMap<String, String>();
+
+  private ViewContext context;
+  private AmbariApi ambariApi;
+
+  public AuthConfigurationBuilder(ViewContext context) {
+    this.context = context;
+    this.ambariApi = new AmbariApi(context);
+  }
+
+  /**
+   * Parses the semicolon-separated auth parameter string into a Map.
+   * If no auth parameters are provided, tries to determine them
+   * from the Ambari configuration.
+   */
+  private void parseProperties() throws HdfsApiException {
+    String auth;
+    auth = context.getProperties().get("webhdfs.auth");
+
+    if (auth == null || auth.isEmpty()) {
+      try {
+        auth = getConfigurationFromAmbari();
+      } catch (NoClusterAssociatedException e) {
+        auth = "auth=SIMPLE";
+        LOG.warn(String.format("HDFS090 Authentication parameters could not be determined. %s assumed.", auth));
+      }
+    }
+
+    parseAuthString(auth);
+  }
+
+  private void parseAuthString(String auth) {
+    for (String param : auth.split(";")) {
+      String[] keyvalue = param.split("=");
+      if (keyvalue.length != 2) {
+        LOG.error("HDFS050 Can not parse authentication param " + param + " in " + auth);
+        continue;
+      }
+      params.put(keyvalue[0], keyvalue[1]);
+    }
+  }
+
+  /**
+   * Determine configuration from Ambari.
+   */
+  private String getConfigurationFromAmbari() throws NoClusterAssociatedException {
+    String authMethod = ambariApi.getCluster().getConfigurationValue(
+        "core-site", "hadoop.security.authentication");
+    return String.format("auth=%s", authMethod);
+  }
+
+  /**
+   * Builds the auth configuration.
+   * @return Map of auth properties
+   * @throws HdfsApiException if the parameters cannot be parsed
+   */
+  public Map<String, String> build() throws HdfsApiException {
+    parseProperties();
+    return params;
+  }
+}
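
The webhdfs.auth instance property, when set, is a semicolon-separated list of
key=value pairs; a fragment sketch, assuming viewContext is in scope (the
property value is illustrative):

    // view instance property, parsed by parseAuthString():
    //   webhdfs.auth = auth=SIMPLE
    Map<String, String> authParams = new AuthConfigurationBuilder(viewContext).build();
    String auth = authParams.get("auth");  // "SIMPLE", or the value of
                                           // hadoop.security.authentication
                                           // when a cluster is associated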

http://git-wip-us.apache.org/repos/asf/ambari/blob/e28a9c07/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
new file mode 100644
index 0000000..c3ff8bc
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.utils.ambari.AmbariApi;
+import org.apache.ambari.view.utils.ambari.NoClusterAssociatedException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Builds the HDFS Configuration based on the ViewContext.
+ * Supports both directly specified instance properties and properties
+ * loaded from an associated cluster.
+ */
+public class ConfigurationBuilder {
+  protected static final Logger LOG = LoggerFactory.getLogger(ConfigurationBuilder.class);
+  public static final String CORE_SITE = "core-site";
+  public static final String HDFS_SITE = "hdfs-site";
+
+  public static final String DEFAULT_FS_INSTANCE_PROPERTY = "webhdfs.url";
+  public static final String DEFAULT_FS_CLUSTER_PROPERTY  = "fs.defaultFS";
+
+  public static final String NAMESERVICES_INSTANCE_PROPERTY = "webhdfs.nameservices";
+  public static final String NAMESERVICES_CLUSTER_PROPERTY  = "dfs.nameservices";
+  public static final String HA_NAMENODES_INSTANCE_PROPERTY = "webhdfs.ha.namenodes.list";
+
+  public static final String HA_NAMENODES_CLUSTER_PROPERTY  = "dfs.ha.namenodes.%s";
+  public static final String NAMENODE_RPC_NN1_INSTANCE_PROPERTY = "webhdfs.ha.namenode.rpc-address.nn1";
+  public static final String NAMENODE_RPC_NN2_INSTANCE_PROPERTY = "webhdfs.ha.namenode.rpc-address.nn2";
+
+  public static final String NAMENODE_RPC_NN_CLUSTER_PROPERTY   = "dfs.namenode.rpc-address.%s.%s";
+  public static final String NAMENODE_HTTP_NN1_INSTANCE_PROPERTY = "webhdfs.ha.namenode.http-address.nn1";
+  public static final String NAMENODE_HTTP_NN2_INSTANCE_PROPERTY = "webhdfs.ha.namenode.http-address.nn2";
+
+  public static final String NAMENODE_HTTP_NN_CLUSTER_PROPERTY   = "dfs.namenode.http-address.%s.%s";
+  public static final String FAILOVER_PROXY_PROVIDER_INSTANCE_PROPERTY = "webhdfs.client.failover.proxy.provider";
+  public static final String FAILOVER_PROXY_PROVIDER_CLUSTER_PROPERTY  = "dfs.client.failover.proxy.provider.%s";
+
+  private Configuration conf = new Configuration();
+  private ViewContext context;
+  private AmbariApi ambariApi = null;
+
+  /**
+   * Constructor of ConfigurationBuilder based on ViewContext
+   * @param context ViewContext
+   */
+  public ConfigurationBuilder(ViewContext context) {
+    this.context = context;
+    ambariApi = new AmbariApi(context);
+  }
+
+  private void parseProperties() throws HdfsApiException {
+    String defaultFS = getDefaultFS(context);
+
+    try {
+
+      if (isHAEnabled(defaultFS)) {
+        copyHAProperties(defaultFS);
+
+        LOG.info("HA HDFS cluster found.");
+      } else {
+        if (!hasPort(defaultFS)) {
+          defaultFS = addPortIfMissing(defaultFS);
+        }
+      }
+
+    } catch (URISyntaxException e) {
+      throw new HdfsApiException("HDFS060 Invalid " + DEFAULT_FS_INSTANCE_PROPERTY +
+          "='" + defaultFS + "' URI", e);
+    }
+
+    conf.set("fs.defaultFS", defaultFS);
+    LOG.info(String.format("HdfsApi configured to connect to defaultFS='%s'", defaultFS));
+  }
+
+  private String getDefaultFS(ViewContext context) throws HdfsApiException {
+    String defaultFS = getProperty(CORE_SITE, DEFAULT_FS_CLUSTER_PROPERTY, DEFAULT_FS_INSTANCE_PROPERTY);
+
+    if (defaultFS == null || defaultFS.isEmpty())
+      throw new HdfsApiException("HDFS070 fs.defaultFS is not configured");
+
+    defaultFS = addProtocolIfMissing(defaultFS);
+    return defaultFS;
+  }
+
+  private String getProperty(String type, String key, String instanceProperty) {
+    String value;
+    try {
+      value = ambariApi.getCluster().getConfigurationValue(type, key);
+    } catch (NoClusterAssociatedException e) {
+      value = context.getProperties().get(instanceProperty);
+    }
+    return value;
+  }
+
+  private void copyHAProperties(String defaultFS) throws URISyntaxException, HdfsApiException {
+    URI uri = new URI(defaultFS);
+    String nameservice = uri.getHost();
+
+    copyClusterProperty(NAMESERVICES_CLUSTER_PROPERTY, NAMESERVICES_INSTANCE_PROPERTY);
+    String namenodeIDs = copyClusterProperty(String.format(HA_NAMENODES_CLUSTER_PROPERTY, nameservice),
+                                             HA_NAMENODES_INSTANCE_PROPERTY);
+
+    String[] namenodes = namenodeIDs.split(",");
+    if (namenodes.length != 2) {
+      throw new HdfsApiException("HDFS080 " + HA_NAMENODES_INSTANCE_PROPERTY + " namenodes count is not exactly 2");
+    }
+    //NN1
+    copyClusterProperty(String.format(NAMENODE_RPC_NN_CLUSTER_PROPERTY, nameservice, namenodes[0]),
+                        NAMENODE_RPC_NN1_INSTANCE_PROPERTY);
+    copyClusterProperty(String.format(NAMENODE_HTTP_NN_CLUSTER_PROPERTY, nameservice, namenodes[0]),
+                        NAMENODE_HTTP_NN1_INSTANCE_PROPERTY);
+
+    //NN2
+    copyClusterProperty(String.format(NAMENODE_RPC_NN_CLUSTER_PROPERTY, nameservice, namenodes[1]),
+                        NAMENODE_RPC_NN2_INSTANCE_PROPERTY);
+    copyClusterProperty(String.format(NAMENODE_HTTP_NN_CLUSTER_PROPERTY, nameservice, namenodes[1]),
+                        NAMENODE_HTTP_NN2_INSTANCE_PROPERTY);
+
+    copyClusterProperty(String.format(FAILOVER_PROXY_PROVIDER_CLUSTER_PROPERTY, nameservice),
+                        FAILOVER_PROXY_PROVIDER_INSTANCE_PROPERTY);
+  }
+
+  private String copyClusterProperty(String propertyName, String instancePropertyName) {
+    String value = getProperty(HDFS_SITE, propertyName, instancePropertyName);
+    conf.set(propertyName, value);
+    return value;
+  }
+
+  private boolean isHAEnabled(String defaultFS) throws URISyntaxException {
+    URI uri = new URI(defaultFS);
+    String nameservice = uri.getHost();
+    String namenodeIDs = getProperty(HDFS_SITE, String.format(HA_NAMENODES_CLUSTER_PROPERTY, nameservice),
+                                     HA_NAMENODES_INSTANCE_PROPERTY);
+    return namenodeIDs != null;
+  }
+
+  private static boolean hasPort(String url) throws URISyntaxException {
+    URI uri = new URI(url);
+    return uri.getPort() != -1;
+  }
+
+  protected static String addPortIfMissing(String defaultFs) throws URISyntaxException {
+    if (!hasPort(defaultFs)) {
+      defaultFs = defaultFs + ":50070";
+    }
+
+    return defaultFs;
+  }
+
+  protected static String addProtocolIfMissing(String defaultFs) {
+    if (!defaultFs.matches("^[^:]+://.*$")) {
+      defaultFs = "webhdfs://" + defaultFs;
+    }
+
+    return defaultFs;
+  }
+
+  /**
+   * Build the HDFS configuration
+   * @return configured HDFS Configuration object
+   * @throws HdfsApiException if configuration parsing failed
+   */
+  public Configuration build() throws HdfsApiException {
+    parseProperties();
+
+    conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
+    conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
+    conf.set("fs.file.impl", LocalFileSystem.class.getName());
+
+    return conf;
+  }
+}
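
For an HA cluster with no Ambari association, the webhdfs.* instance properties
mirror their dfs.* cluster counterparts; a sketch of a plausible view instance
configuration (nameservice, hosts and ports are placeholders):

    webhdfs.url                            = webhdfs://mycluster
    webhdfs.nameservices                   = mycluster
    webhdfs.ha.namenodes.list              = nn1,nn2
    webhdfs.ha.namenode.rpc-address.nn1    = master1.example.com:8020
    webhdfs.ha.namenode.rpc-address.nn2    = master2.example.com:8020
    webhdfs.ha.namenode.http-address.nn1   = master1.example.com:50070
    webhdfs.ha.namenode.http-address.nn2   = master2.example.com:50070
    webhdfs.client.failover.proxy.provider = org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider

With fs.defaultFS pointing at the nameservice, the built Configuration lets the
client fail over between nn1 and nn2; a fragment sketch, assuming viewContext
is in scope:

    Configuration conf = new ConfigurationBuilder(viewContext).build();
    FileSystem fs = FileSystem.get(conf);  // resolves the active NameNode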