Posted to commits@ambari.apache.org by tb...@apache.org on 2014/05/03 01:41:23 UTC

[4/4] git commit: AMBARI-5617 - Ambari Views: FileBrowser view (Roman Rader via tbeerbower)

AMBARI-5617 - Ambari Views: FileBrowser view (Roman Rader via tbeerbower)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1f9f2a08
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1f9f2a08
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1f9f2a08

Branch: refs/heads/branch-1.6.0
Commit: 1f9f2a08c05642afaee759d50dc2308f4294db17
Parents: f0acfe5
Author: tbeerbower <tb...@hortonworks.com>
Authored: Fri May 2 19:37:05 2014 -0400
Committer: tbeerbower <tb...@hortonworks.com>
Committed: Fri May 2 19:39:21 2014 -0400

----------------------------------------------------------------------
 contrib/views/files/pom.xml                     | 258 ++++++++++++
 contrib/views/files/src/assembly/assembly.xml   |  26 ++
 .../view/filebrowser/DownloadService.java       | 282 +++++++++++++
 .../view/filebrowser/FileBrowserService.java    |  52 +++
 .../view/filebrowser/FileOperationService.java  | 181 ++++++++
 .../apache/ambari/view/filebrowser/HdfsApi.java | 237 +++++++++++
 .../ambari/view/filebrowser/HdfsService.java    |  58 +++
 .../ambari/view/filebrowser/HelpService.java    | 101 +++++
 .../ambari/view/filebrowser/UploadService.java  |  96 +++++
 .../files/src/main/resources/ui/.gitignore      |  34 ++
 .../files/src/main/resources/ui/app/adapter.js  | 351 ++++++++++++++++
 .../files/src/main/resources/ui/app/app.js      |  19 +
 .../ui/app/assets/fonts/fontawesome-webfont.svg | 414 +++++++++++++++++++
 .../fonts/glyphicons-halflings-regular.svg      | 229 ++++++++++
 .../src/main/resources/ui/app/assets/index.html |  34 ++
 .../resources/ui/app/components/contextMenu.js  |  38 ++
 .../resources/ui/app/components/uploader.js     | 103 +++++
 .../main/resources/ui/app/controllers/file.js   | 106 +++++
 .../main/resources/ui/app/controllers/files.js  | 150 +++++++
 .../src/main/resources/ui/app/initialize.js     |  75 ++++
 .../src/main/resources/ui/app/models/file.js    |  43 ++
 .../files/src/main/resources/ui/app/router.js   |  23 ++
 .../src/main/resources/ui/app/routes/error.js   |  21 +
 .../src/main/resources/ui/app/routes/file.js    |  40 ++
 .../resources/ui/app/styles/application.less    | 279 +++++++++++++
 .../resources/ui/app/templates/application.hbs  |  21 +
 .../main/resources/ui/app/templates/error.hbs   |  24 ++
 .../main/resources/ui/app/templates/files.hbs   | 254 ++++++++++++
 .../main/resources/ui/app/templates/index.hbs   |  18 +
 .../ui/app/templates/util/contextMenu.hbs       |  56 +++
 .../ui/app/templates/util/deleteBulk.hbs        |  38 ++
 .../ui/app/templates/util/deletePopover.hbs     |  38 ++
 .../ui/app/templates/util/uploader.hbs          |  35 ++
 .../src/main/resources/ui/app/views/file.js     | 211 ++++++++++
 contrib/views/files/src/main/resources/ui/bin   |   1 +
 .../files/src/main/resources/ui/bower.json      |  34 ++
 .../files/src/main/resources/ui/config.coffee   |  51 +++
 .../ui/generators/collection/collection.js.hbs  |  23 ++
 .../ui/generators/collection/generator.json     |   9 +
 .../ui/generators/controller/controller.js.hbs  |  23 ++
 .../ui/generators/controller/generator.json     |   9 +
 .../ui/generators/model/generator.json          |   9 +
 .../resources/ui/generators/model/model.js.hbs  |  23 ++
 .../ui/generators/route/generator.json          |   9 +
 .../resources/ui/generators/route/route.js.hbs  |  25 ++
 .../ui/generators/template/generator.json       |   9 +
 .../ui/generators/template/template.hbs.hbs     |  18 +
 .../resources/ui/generators/view/generator.json |   9 +
 .../resources/ui/generators/view/view.js.hbs    |  23 ++
 .../files/src/main/resources/ui/package.json    |  38 ++
 .../src/main/resources/ui/test/spec.coffee      |  17 +
 contrib/views/files/src/main/resources/view.xml |  42 ++
 .../view/filebrowser/FilebrowserTest.java       | 185 +++++++++
 contrib/views/pom.xml                           |   4 +-
 contrib/views/pom.xml.rej                       | 136 ++++++
 55 files changed, 4639 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/pom.xml b/contrib/views/files/pom.xml
new file mode 100644
index 0000000..77e1727
--- /dev/null
+++ b/contrib/views/files/pom.xml
@@ -0,0 +1,258 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>org.apache.ambari.view</groupId>
+    <artifactId>files</artifactId>
+    <version>0.0.1-SNAPSHOT</version>
+    <name>Filebrowser</name>
+    
+    <parent>
+        <groupId>org.apache.ambari.views</groupId>
+        <artifactId>ambari-views-poc</artifactId>
+        <version>0.1.0-SNAPSHOT</version>
+    </parent>
+    
+    <dependencies>
+    <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdfs</artifactId>
+        <version>${hadoop-version}</version>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-common</artifactId>
+        <version>${hadoop-version}</version>
+    </dependency>
+    <dependency>
+        <groupId>junit</groupId>
+        <artifactId>junit</artifactId>
+        <scope>test</scope>
+    </dependency>
+    <dependency>
+        <groupId>org.easymock</groupId>
+        <artifactId>easymock</artifactId>
+        <scope>test</scope>
+    </dependency>
+    <dependency>
+        <groupId>com.google.inject</groupId>
+        <artifactId>guice</artifactId>
+    </dependency>
+    <dependency>
+        <groupId>org.glassfish.jersey.containers</groupId>
+        <artifactId>jersey-container-servlet</artifactId>
+    </dependency>
+    <dependency>
+        <groupId>com.sun.jersey.contribs</groupId>
+        <artifactId>jersey-multipart</artifactId>
+        <version>1.18</version>
+    </dependency>
+    <dependency>
+        <groupId>com.googlecode.json-simple</groupId>
+        <artifactId>json-simple</artifactId>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-minicluster</artifactId>
+        <version>${hadoop-version}</version>
+        <scope>test</scope>
+    </dependency>
+
+    <dependency>
+        <groupId>org.glassfish.jersey.test-framework</groupId>
+        <artifactId>jersey-test-framework-core</artifactId>
+        <version>2.6</version>
+        <scope>test</scope>
+    </dependency>
+    <dependency>
+        <groupId>org.glassfish.jersey.test-framework.providers</groupId>
+        <artifactId>jersey-test-framework-provider-grizzly2</artifactId>
+        <version>2.6</version>
+        <scope>test</scope>
+    </dependency>
+    <dependency>
+        <groupId>
+        org.glassfish.jersey.test-framework.providers
+        </groupId>
+        <artifactId>
+        jersey-test-framework-provider-bundle
+        </artifactId>
+        <version>2.6</version>
+        <scope>test</scope>
+        <type>pom</type>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.ambari</groupId>
+        <artifactId>ambari-views</artifactId>
+    </dependency>
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+            <version>2.2.2</version>
+        </dependency>
+    </dependencies>
+
+    <properties>
+    <hadoop-version>2.2.0</hadoop-version>
+    <nodejs.directory>${basedir}/target/nodejs</nodejs.directory>
+    <npm.version>1.4.3</npm.version>
+    <ui.directory>${basedir}/src/main/resources/ui</ui.directory>
+    </properties>
+    <build>
+
+    <plugins>
+        <plugin>
+        <groupId>com.github.eirslett</groupId>
+        <artifactId>frontend-maven-plugin</artifactId>
+        <version>0.0.14</version>
+
+        <!-- optional -->
+        <configuration>
+            <workingDirectory>src/main/resources/ui</workingDirectory>
+        </configuration>
+
+        <executions>
+            <execution>
+            <!-- optional: you don't really need execution ids, but it looks nice
+                 in your build log. -->
+            <id>install node and npm</id>
+            <goals>
+                <goal>install-node-and-npm</goal>
+            </goals>
+            <!-- optional: default phase is "generate-resources" -->
+            <phase>generate-resources</phase>
+            <configuration>
+                <nodeVersion>v0.10.26</nodeVersion>
+                <npmVersion>1.4.3</npmVersion>
+            </configuration>
+            </execution>
+            <execution>
+            <id>npm install</id>
+            <goals>
+                <goal>npm</goal>
+            </goals>
+
+            <!-- optional: default phase is "generate-resources" -->
+            <phase>generate-resources</phase>
+
+            <configuration>
+                <!-- optional: The default argument is actually "install", so unless
+                 you need to run some other npm command, you can remove this whole <configuration>
+                 section. -->
+                <arguments>install --unsafe-perm --registry=http://registry.npmjs.eu</arguments>
+            </configuration>
+            </execution>
+        </executions>
+        </plugin>
+        <plugin>
+        <artifactId>exec-maven-plugin</artifactId>
+        <groupId>org.codehaus.mojo</groupId>
+        <version>1.2.1</version>
+        <executions>
+            <execution>
+            <id>Brunch build</id>
+            <phase>generate-resources</phase>
+            <goals>
+                <goal>exec</goal>
+            </goals>
+            <configuration>
+                <workingDirectory>${basedir}/src/main/resources/ui</workingDirectory>
+                <executable>node/node</executable>
+                <arguments>
+                <argument>node_modules/.bin/brunch</argument>
+                <argument>build</argument>
+                                <argument>--production</argument>
+                </arguments>
+            </configuration>
+            </execution>
+        </executions>
+        </plugin>
+    </plugins>
+    <resources>
+        <resource>
+        <directory>src/main/resources/ui/public</directory>
+        <filtering>false</filtering>
+        </resource>
+
+        <resource>
+        <directory>src/main/resources/</directory>
+        <filtering>false</filtering>
+        <includes>
+            <include>view.xml</include>
+        </includes>
+        </resource>
+    </resources>
+    <pluginManagement>
+        <plugins>
+        <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+        <plugin>
+            <groupId>org.eclipse.m2e</groupId>
+            <artifactId>lifecycle-mapping</artifactId>
+            <version>1.0.0</version>
+            <configuration>
+            <lifecycleMappingMetadata>
+                <pluginExecutions>
+                <pluginExecution>
+                    <pluginExecutionFilter>
+                    <groupId>
+                        org.codehaus.mojo
+                    </groupId>
+                    <artifactId>
+                        exec-maven-plugin
+                    </artifactId>
+                    <versionRange>
+                        [1.2.1,)
+                    </versionRange>
+                    <goals>
+                        <goal>exec</goal>
+                    </goals>
+                    </pluginExecutionFilter>
+                    <action>
+                    <ignore></ignore>
+                    </action>
+                </pluginExecution>
+                <pluginExecution>
+                    <pluginExecutionFilter>
+                    <groupId>
+                        com.github.eirslett
+                    </groupId>
+                    <artifactId>
+                        frontend-maven-plugin
+                    </artifactId>
+                    <versionRange>
+                        [0.0.14,)
+                    </versionRange>
+                    <goals>
+                        <goal>
+                        install-node-and-npm
+                        </goal>
+                        <goal>npm</goal>
+                    </goals>
+                    </pluginExecutionFilter>
+                    <action>
+                    <ignore></ignore>
+                    </action>
+                </pluginExecution>
+                </pluginExecutions>
+            </lifecycleMappingMetadata>
+            </configuration>
+        </plugin>
+        </plugins>
+    </pluginManagement>
+    </build>
+</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/assembly/assembly.xml b/contrib/views/files/src/assembly/assembly.xml
new file mode 100644
index 0000000..afdcc37
--- /dev/null
+++ b/contrib/views/files/src/assembly/assembly.xml
@@ -0,0 +1,26 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<assembly
+	xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+	<containerDescriptorHandlers>
+		<containerDescriptorHandler>
+			<handlerName>metaInf-services</handlerName>
+		</containerDescriptorHandler>
+	</containerDescriptorHandlers>	
+</assembly>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
new file mode 100644
index 0000000..ca6ba66
--- /dev/null
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
@@ -0,0 +1,282 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.filebrowser;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.FileNameMap;
+import java.net.URLConnection;
+import java.util.LinkedList;
+import java.util.Queue;
+import java.util.UUID;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.StreamingOutput;
+import javax.ws.rs.core.UriInfo;
+import javax.xml.bind.annotation.XmlElement;
+
+import com.google.gson.Gson;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.ambari.view.ViewContext;
+import org.json.simple.JSONObject;
+//import org.glassfish.jersey.server.ChunkedOutput;
+
+public class DownloadService extends HdfsService {
+
+    public static class DownloadRequest {
+        @XmlElement(nillable = false, required = true)
+        public String[] entries;
+        @XmlElement(required = false)
+        public boolean download;
+    }
+
+    public DownloadService(ViewContext context) {
+        super(context);
+    }
+
+    @GET
+    @Path("/browse")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response browse(@QueryParam("path") String path, @QueryParam("download") boolean download,
+        @Context HttpHeaders headers, @Context UriInfo ui) {
+        try {
+            HdfsApi api = getApi(context);
+            FileStatus status = api.getFileStatus(path);
+            FSDataInputStream fs = api.open(path);
+            ResponseBuilder result = Response.ok(fs);
+            if (download) {
+                result.header("Content-Disposition",
+                    "inline; filename=\"" + status.getPath().getName() + "\"").type(MediaType.APPLICATION_OCTET_STREAM);
+            } else {
+                FileNameMap fileNameMap = URLConnection.getFileNameMap();
+          String mimeType = fileNameMap.getContentTypeFor(status.getPath().getName());
+                result.header("Content-Disposition",
+                    "filename=\"" + status.getPath().getName() + "\"").type(mimeType);
+            }
+            return result.build();
+        } catch (FileNotFoundException ex) {
+            return Response.ok(Response.Status.NOT_FOUND.getStatusCode())
+                .entity(ex.getMessage()).build();
+        } catch (Exception ex) {
+            return Response.ok(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode())
+                .entity(ex.getMessage()).build();
+        }
+    }
+
+    private void zipFile(ZipOutputStream zip, String path)
+        throws InterruptedException, Exception {
+        try {
+            zip.putNextEntry(new ZipEntry(path.substring(1)));
+            FSDataInputStream in = getApi(context).open(path);
+            byte[] chunk = new byte[1024];
+            int read;
+            // write only the bytes actually read, so a partial final chunk does not corrupt the entry
+            while ((read = in.read(chunk)) != -1) {
+                zip.write(chunk, 0, read);
+            }
+        } catch (IOException ex) {
+            logger.error("Error zipping file " + path.substring(1) + ": "
+                + ex.getMessage());
+            zip.write(ex.getMessage().getBytes());
+        } finally {
+            zip.closeEntry();
+        }
+
+    }
+
+    private void zipDirectory(ZipOutputStream zip, String path) {
+        try {
+            zip.putNextEntry(new ZipEntry(path.substring(1) + "/"));
+            zip.closeEntry();
+        } catch (IOException e) {
+            logger.error("Error zipping directory " + path.substring(1) + "/" + ": "
+                + e.getMessage());
+        }
+    }
+
+    @POST
+    @Path("/zip")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_OCTET_STREAM)
+    public Response downloadGZip(final DownloadRequest request) {
+        StreamingOutput result = new StreamingOutput() {
+            public void write(OutputStream output) throws IOException,
+                WebApplicationException {
+                ZipOutputStream zip = new ZipOutputStream(output);
+                try {
+                    HdfsApi api = getApi(context);
+                    Queue<String> files = new LinkedList<String>();
+                    for (String file : request.entries) {
+                        files.add(file);
+                    }
+                    while (!files.isEmpty()) {
+                        String path = files.poll();
+                        FileStatus status = api.getFileStatus(path);
+                        if (status.isDirectory()) {
+                            FileStatus[] subdir = api.listdir(path);
+                            for (FileStatus file : subdir) {
+                                files.add(org.apache.hadoop.fs.Path
+                                    .getPathWithoutSchemeAndAuthority(file.getPath())
+                                    .toString());
+                            }
+                            zipDirectory(zip, path);
+                        } else {
+                            zipFile(zip, path);
+                        }
+                    }
+                } catch (Exception ex) {
+                    logger.error("Error occurred: " + ex.getMessage());
+                } finally {
+                    zip.close();
+                }
+            }
+        };
+        return Response.ok(result)
+            .header("Content-Disposition", "inline; filename=\"hdfs.zip\"").build();
+    }
+
+    @POST
+    @Path("/concat")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_OCTET_STREAM)
+    public Response concat(final DownloadRequest request) {
+        StreamingOutput result = new StreamingOutput() {
+            public void write(OutputStream output) throws IOException,
+                WebApplicationException {
+                FSDataInputStream in = null;
+                for (String path : request.entries) {
+                    try {
+                        in = getApi(context).open(path);
+                        byte[] chunk = new byte[1024];
+                        int read;
+                        // write only the bytes actually read from the current file
+                        while ((read = in.read(chunk)) != -1) {
+                            output.write(chunk, 0, read);
+                        }
+                    } catch (Exception ex) {
+                        ex.printStackTrace();
+                    } finally {
+                        if (in != null)
+                            in.close();
+                    }
+                }
+            }
+        };
+        ResponseBuilder response = Response.ok(result);
+        if (request.download){
+            response.header("Content-Disposition", "inline; filename=\"concatResult.txt\"").type(MediaType.APPLICATION_OCTET_STREAM);
+        } else {
+            response.header("Content-Disposition", "filename=\"concatResult.txt\"").type(MediaType.TEXT_PLAIN);
+        }
+        return response.build();
+    }
+
+    // ===============================
+    // Download files by unique link
+
+    @GET
+    @Path("/zip")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_OCTET_STREAM)
+    public Response zipByRequestId(@QueryParam("requestId") String requestId) {
+        String json = context.getInstanceData(requestId);
+        DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
+        context.removeInstanceData(requestId);
+        return downloadGZip(request);
+    }
+
+    @POST
+    @Path("/zip/generate-link")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response zipGenerateLink(final DownloadRequest request) {
+        String requestId = generateUniqueIdentifer(request);
+        JSONObject json = new JSONObject();
+        json.put("requestId", requestId);
+        return Response.ok(json).build();
+    }
+
+    @GET
+    @Path("/concat")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_OCTET_STREAM)
+    public Response concatByRequestId(@QueryParam("requestId") String requestId) {
+        String json = context.getInstanceData(requestId);
+        DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
+        context.removeInstanceData(requestId);
+        return concat(request);
+    }
+
+    @POST
+    @Path("/concat/generate-link")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response concatGenerateLink(final DownloadRequest request) {
+        String requestId = generateUniqueIdentifer(request);
+        JSONObject json = new JSONObject();
+        json.put("requestId", requestId);
+        return Response.ok(json).build();
+    }
+
+    private Gson gson = new Gson();
+
+    private String generateUniqueIdentifer(DownloadRequest request) {
+        String uuid = UUID.randomUUID().toString().replaceAll("-", "");
+        String json = gson.toJson(request);
+        context.putInstanceData(uuid, json);
+        return uuid;
+    }
+
+    /*
+     * Temporary use Stream Output
+     *
+     * @POST
+     *
+     * @Path("/concat")
+     *
+     * @Consumes(MediaType.APPLICATION_JSON)
+     *
+     * @Produces(MediaType.APPLICATION_OCTET_STREAM) public ChunkedOutput<byte[]>
+     * concat(final DownloadRequest request) { final ChunkedOutput<byte[]> output
+     * = new ChunkedOutput<byte[]>(byte[].class);
+     *
+     * new Thread() { public void run() { try { FSDataInputStream in = null; for
+     * (String path : request.entries) { try { in = getApi(context).open(path);
+     * byte[] chunk = new byte[1024]; while (in.read(chunk) != -1) {
+     * output.write(chunk); } } finally { if (in != null) in.close(); }
+     *
+     * } } catch (Exception ex) { logger.error("Error occured: " +
+     * ex.getMessage()); } finally { try { output.close(); } catch (IOException e)
+     * { e.printStackTrace(); } } } }.start();
+     *
+     * return output; }
+     */
+
+}
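
As a rough usage sketch (not part of the patch above), the /download/zip endpoint could be driven from a JAX-RS 2.x client roughly as follows; the base URL, HDFS path and authentication handling are placeholders that depend on how the view instance is actually deployed:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import java.io.InputStream;

public class ZipDownloadClientSketch {

    public static void main(String[] args) {
        // Placeholder base URL; the real resource URL is assigned by the Ambari
        // server from the name and version of the deployed view instance.
        String base = "http://ambari.example.com:8080/api/v1/views/FILES"
            + "/versions/0.0.1/instances/files/resources/files";

        // JSON body matching DownloadService.DownloadRequest.
        String request = "{\"entries\":[\"/user/admin/data\"],\"download\":true}";

        Client client = ClientBuilder.newClient();
        Response response = client.target(base + "/download/zip")
            .request(MediaType.APPLICATION_OCTET_STREAM)
            .post(Entity.entity(request, MediaType.APPLICATION_JSON));

        // The service streams back a zip archive ("hdfs.zip") of the requested entries.
        InputStream zip = response.readEntity(InputStream.class);
        System.out.println("HTTP " + response.getStatus() + ", zip stream open: " + (zip != null));
    }
}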

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
new file mode 100644
index 0000000..d45d680
--- /dev/null
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.filebrowser;
+
+import javax.ws.rs.Path;
+
+import org.apache.ambari.view.ViewContext;
+
+import com.google.inject.Inject;
+
+public class FileBrowserService {
+
+    @Inject
+    ViewContext context;
+
+    @Path("/download")
+    public DownloadService download() {
+        return new DownloadService(context);
+    }
+
+    @Path("/upload")
+    public UploadService upload() {
+        return new UploadService(context);
+    }
+
+    @Path("/fileops")
+    public FileOperationService fileOps() {
+        return new FileOperationService(context);
+    }
+
+    @Path("/help")
+    public HelpService help() {
+        return new HelpService(context);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
new file mode 100644
index 0000000..ce1f675
--- /dev/null
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.filebrowser;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.UriInfo;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.ambari.view.ViewContext;
+
+public class FileOperationService extends HdfsService {
+
+    public FileOperationService(ViewContext context) {
+        super(context);
+    }
+
+    @XmlRootElement
+    public static class MkdirRequest {
+        @XmlElement(nillable = false, required = true)
+        public String path;
+    }
+
+
+    @XmlRootElement
+    public static class SrcDstFileRequest {
+        @XmlElement(nillable = false, required = true)
+        public String src;
+        @XmlElement(nillable = false, required = true)
+        public String dst;
+    }
+
+    @XmlRootElement
+    public static class RemoveRequest {
+        @XmlElement(nillable = false, required = true)
+        public String path;
+        public boolean recursive;
+    }
+
+    @GET
+    @Path("/listdir")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response listdir(@QueryParam("path") String path,
+        @Context HttpHeaders headers, @Context UriInfo ui) throws Exception {
+        try {
+            return Response.ok(
+                HdfsApi.fileStatusToJSON(getApi(context).listdir(path))).build();
+        } catch (FileNotFoundException ex) {
+            return Response.ok(Response.Status.NOT_FOUND.getStatusCode())
+                .entity(ex.getMessage()).build();
+        } catch (Throwable ex) {
+            throw new Exception(ex.getMessage());
+        }
+    }
+
+    @POST
+    @Path("/rename")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response rename(final SrcDstFileRequest request,
+        @Context HttpHeaders headers, @Context UriInfo ui) throws IOException,
+        Exception {
+        HdfsApi api = getApi(context);
+        ResponseBuilder result;
+        if (api.rename(request.src, request.dst)) {
+            result = Response.ok(HdfsApi.fileStatusToJSON(api
+                .getFileStatus(request.dst)));
+        } else {
+            result = Response.ok(new BoolResult(false)).status(422);
+        }
+        return result.build();
+    }
+
+    @POST
+    @Path("/copy")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response copy(final SrcDstFileRequest request,
+                         @Context HttpHeaders headers, @Context UriInfo ui) throws IOException,
+            Exception {
+        HdfsApi api = getApi(context);
+        ResponseBuilder result;
+        if (api.copy(request.src, request.dst)) {
+            result = Response.ok(HdfsApi.fileStatusToJSON(api
+                    .getFileStatus(request.dst)));
+        } else {
+            result = Response.ok(new BoolResult(false)).status(422);
+        }
+        return result.build();
+    }
+
+    @PUT
+    @Path("/mkdir")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response mkdir(final MkdirRequest request,
+        @Context HttpHeaders headers, @Context UriInfo ui) throws IOException,
+        Exception {
+        HdfsApi api = getApi(context);
+        ResponseBuilder result;
+        if (api.mkdir(request.path)) {
+            result = Response.ok(HdfsApi.fileStatusToJSON(api.getFileStatus(request.path)));
+        } else {
+            result = Response.ok(new BoolResult(false)).status(422);
+        }
+        return result.build();
+    }
+
+    @DELETE
+    @Path("/trash/emptyTrash")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response emptyTrash(@Context HttpHeaders headers,
+        @Context UriInfo ui) throws IOException, Exception {
+        HdfsApi api = getApi(context);
+        api.emptyTrash();
+        return Response.ok(new BoolResult(true)).build();
+    }
+
+    @DELETE
+    @Path("/moveToTrash")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response moveToTrash(RemoveRequest request, @Context HttpHeaders headers,
+        @Context UriInfo ui) throws IOException, Exception {
+        HdfsApi api = getApi(context);
+        ResponseBuilder result;
+        if (api.moveToTrash(request.path)){
+            result = Response.ok(new BoolResult(true)).status(204);
+        } else {
+            result = Response.ok(new BoolResult(false)).status(422);
+        }
+        return result.build();
+    }
+
+    @DELETE
+    @Path("/remove")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response remove(RemoveRequest request, @Context HttpHeaders headers,
+        @Context UriInfo ui) throws IOException, Exception {
+        HdfsApi api = getApi(context);
+        ResponseBuilder result;
+        if (api.delete(request.path, request.recursive)){
+            result = Response.ok(new BoolResult(true)).status(204);
+        } else {
+            result = Response.ok(new BoolResult(false)).status(422);
+        }
+        return result.build();
+    }
+
+}
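
A similar sketch for the listdir operation above, with the same placeholder base URL and JAX-RS 2.x client assumptions as the download sketch:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;

public class ListdirClientSketch {

    public static void main(String[] args) {
        // Placeholder base URL; see the download sketch above.
        String base = "http://ambari.example.com:8080/api/v1/views/FILES"
            + "/versions/0.0.1/instances/files/resources/files";

        Client client = ClientBuilder.newClient();

        // GET /fileops/listdir?path=... returns the JSON array built by
        // HdfsApi.fileStatusToJSON(FileStatus[]).
        String listing = client.target(base + "/fileops/listdir")
            .queryParam("path", "/user/admin")
            .request(MediaType.APPLICATION_JSON)
            .get(String.class);

        System.out.println(listing);
    }
}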

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
new file mode 100644
index 0000000..b5d29c4
--- /dev/null
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
@@ -0,0 +1,237 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.filebrowser;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.permission.FsPermission;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URI;
+import java.security.PrivilegedExceptionAction;
+import java.util.Map;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.json.simple.JSONArray;
+
+import java.util.LinkedHashMap;
+
+public class HdfsApi {
+    private final Configuration conf = new Configuration();
+
+    private FileSystem fs;
+    private UserGroupInformation ugi;
+
+    public HdfsApi(String defaultFs, String username) throws IOException,
+        InterruptedException {
+        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
+        conf.set("fs.webhdfs.impl", "org.apache.hadoop.hdfs.web.WebHdfsFileSystem");
+        conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
+        fs = FileSystem.get(URI.create(defaultFs), conf, username);
+        ugi = UserGroupInformation.createProxyUser(username,
+            UserGroupInformation.getLoginUser());
+    }
+
+    public FileStatus[] listdir(final String path) throws FileNotFoundException,
+        IOException, InterruptedException {
+        return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
+            public FileStatus[] run() throws FileNotFoundException, Exception {
+                return fs.listStatus(new Path(path));
+            }
+        });
+    }
+
+    public FileStatus getFileStatus(final String path) throws IOException,
+        FileNotFoundException, InterruptedException {
+        return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
+            public FileStatus run() throws FileNotFoundException, IOException {
+                return fs.getFileStatus(new Path(path));
+            }
+        });
+    }
+
+    public boolean mkdir(final String path) throws IOException,
+        InterruptedException {
+        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+            public Boolean run() throws Exception {
+                return fs.mkdirs(new Path(path));
+            }
+        });
+    }
+
+    public boolean rename(final String src, final String dst) throws IOException,
+        InterruptedException {
+        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+            public Boolean run() throws Exception {
+                return fs.rename(new Path(src), new Path(dst));
+            }
+        });
+    }
+
+    public boolean trashEnabled() throws Exception {
+        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+            public Boolean run() throws IOException {
+                Trash tr = new Trash(fs, conf);
+                return tr.isEnabled();
+            }
+        });
+    }
+
+    public Path getHomeDir() throws Exception {
+        return ugi.doAs(new PrivilegedExceptionAction<Path>() {
+            public Path run() throws IOException {
+                return fs.getHomeDirectory();
+            }
+        });
+    }
+
+    public Path getTrashDir() throws Exception {
+        return ugi.doAs(new PrivilegedExceptionAction<Path>() {
+            public Path run() throws IOException {
+                TrashPolicy trashPolicy = TrashPolicy.getInstance(conf, fs,
+                    fs.getHomeDirectory());
+                return trashPolicy.getCurrentTrashDir().getParent();
+            }
+        });
+    }
+
+    public Void emptyTrash() throws Exception {
+        return ugi.doAs(new PrivilegedExceptionAction<Void>() {
+            public Void run() throws IOException {
+                Trash tr = new Trash(fs, conf);
+                tr.expunge();
+                return null;
+            }
+        });
+    }
+
+    public boolean moveToTrash(final String path) throws IOException,
+        InterruptedException {
+        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+            public Boolean run() throws Exception {
+                return Trash.moveToAppropriateTrash(fs, new Path(path), conf);
+            }
+        });
+    }
+
+    public boolean delete(final String path, final boolean recursive)
+        throws IOException, InterruptedException {
+        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+            public Boolean run() throws Exception {
+                return fs.delete(new Path(path), recursive);
+            }
+        });
+    }
+
+    public FSDataOutputStream create(final String path, final boolean overwrite)
+        throws IOException, InterruptedException {
+        return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
+            public FSDataOutputStream run() throws Exception {
+                return fs.create(new Path(path), overwrite);
+            }
+        });
+    }
+
+    public FSDataInputStream open(final String path) throws IOException,
+        InterruptedException {
+        return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
+            public FSDataInputStream run() throws Exception {
+                return fs.open(new Path(path));
+            }
+        });
+    }
+
+    public boolean copy(final String src, final String dest) throws IOException,
+        InterruptedException {
+        return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+            public Boolean run() throws Exception {
+                return FileUtil
+                    .copy(fs, new Path(src), fs, new Path(dest), false, conf);
+            }
+        });
+    }
+
+    /**
+     * Converts a Hadoop permission into a Unix permission symbolic representation
+     * (i.e. -rwxr--r--) or default if the permission is NULL.
+     *
+     * @param p
+     *          Hadoop permission.
+     * @return the Unix permission symbolic representation or default if the
+     *         permission is NULL.
+     */
+    private static String permissionToString(FsPermission p) {
+        return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
+            + p.getGroupAction().SYMBOL + p.getOtherAction().SYMBOL;
+    }
+
+    /**
+     * Converts a Hadoop <code>FileStatus</code> object into a JSON object,
+     * stripping the <code>SCHEME://HOST:PORT</code> prefix from the path.
+     *
+     * @param status
+     *          Hadoop file status.
+     * @return The JSON representation of the file status.
+     */
+
+    public static Map<String, Object> fileStatusToJSON(FileStatus status) {
+        Map<String, Object> json = new LinkedHashMap<String, Object>();
+        json.put("path", Path.getPathWithoutSchemeAndAuthority(status.getPath())
+            .toString());
+        json.put("replication", status.getReplication());
+        json.put("isDirectory", status.isDirectory());
+        json.put("len", status.getLen());
+        json.put("owner", status.getOwner());
+        json.put("group", status.getGroup());
+        json.put("permission", permissionToString(status.getPermission()));
+        json.put("accessTime", status.getAccessTime());
+        json.put("modificationTime", status.getModificationTime());
+        json.put("blockSize", status.getBlockSize());
+        json.put("replication", status.getReplication());
+        return json;
+    }
+
+    /**
+     * Converts a Hadoop <code>FileStatus</code> array into a JSON array,
+     * stripping the <code>SCHEME://HOST:PORT</code> prefix from each path.
+     *
+     * @param status
+     *          Hadoop file status array.
+     * @return The JSON representation of the file status array.
+     */
+    @SuppressWarnings("unchecked")
+    public static JSONArray fileStatusToJSON(FileStatus[] status) {
+        JSONArray json = new JSONArray();
+        if (status != null) {
+            for (FileStatus s : status) {
+                json.add(fileStatusToJSON(s));
+            }
+        }
+        return json;
+    }
+
+}
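
HdfsApi can also be exercised directly; a minimal sketch, assuming placeholder NameNode and user values (in the view these come from the dataworker.defaultFs instance property and the current Ambari user, see HdfsService.getApi()):

package org.apache.ambari.view.filebrowser;

import org.apache.hadoop.fs.FileStatus;
import org.json.simple.JSONArray;

public class HdfsApiSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder values for the default file system URI and proxy user.
        HdfsApi api = new HdfsApi("hdfs://namenode.example.com:8020", "admin");

        // List a directory and render it the same way the REST services do.
        FileStatus[] entries = api.listdir("/user/admin");
        JSONArray listing = HdfsApi.fileStatusToJSON(entries);
        System.out.println(listing.toJSONString());

        // Create a directory and report the outcome.
        boolean created = api.mkdir("/user/admin/reports");
        System.out.println("mkdir: " + created);
    }
}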

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
new file mode 100644
index 0000000..fc71ad8
--- /dev/null
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.filebrowser;
+
+import java.io.IOException;
+
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.ambari.view.ViewContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class HdfsService {
+
+    protected static final Logger logger = LoggerFactory.getLogger(HdfsService.class);
+
+    protected final ViewContext context;
+
+    public HdfsService(ViewContext context) {
+        this.context = context;
+    }
+
+    @XmlRootElement
+    public static class BoolResult{
+        public boolean success;
+        public BoolResult(boolean success){
+            this.success = success;
+        }
+    }
+
+    private HdfsApi _api = null;
+
+    public HdfsApi getApi(ViewContext context) throws IOException, Exception {
+        if (_api == null) {
+            Thread.currentThread().setContextClassLoader(null);
+            _api = new HdfsApi(context.getProperties().get("dataworker.defaultFs")
+                .toString(), context.getUsername());
+        }
+        return _api;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
new file mode 100644
index 0000000..508e4c1
--- /dev/null
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.filebrowser;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.ambari.view.ViewContext;
+
+public class HelpService extends HdfsService {
+
+    public HelpService(ViewContext context) {
+        super(context);
+    }
+
+    @GET
+    @Path("/version")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response version(@Context HttpHeaders headers, @Context UriInfo ui) {
+        return Response.ok("0.0.1-SNAPSHOT").build();
+    }
+
+    @GET
+    @Path("/description")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response description(@Context HttpHeaders headers, @Context UriInfo ui) {
+        return Response.ok("Application to work with HDFS").build();
+    }
+
+    @GET
+    @Path("/filesystem")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response filesystem(@Context HttpHeaders headers, @Context UriInfo ui) {
+        return Response.ok(
+            context.getProperties().get("dataworker.defaultFs").toString()).build();
+    }
+
+    @GET
+    @Path("/home")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response homeDir(@Context HttpHeaders headers, @Context UriInfo ui)
+        throws FileNotFoundException, IOException, InterruptedException,
+        Exception {
+        HdfsApi api = getApi(context);
+        return Response
+            .ok(HdfsApi.fileStatusToJSON(api.getFileStatus(api.getHomeDir()
+                .toString()))).build();
+    }
+
+    @GET
+    @Path("/trash/enabled")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response trashEnabled(@Context HttpHeaders headers, @Context UriInfo ui)
+        throws Exception {
+        HdfsApi api = getApi(context);
+        return Response.ok(new BoolResult(api.trashEnabled())).build();
+    }
+
+    @GET
+    @Path("/trashDir")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response trashdir(@Context HttpHeaders headers, @Context UriInfo ui)
+        throws IOException, Exception {
+        HdfsApi api = getApi(context);
+        try {
+            return Response.ok(
+                HdfsApi.fileStatusToJSON(api.getFileStatus(api.getTrashDir()
+                    .toString()))).build();
+        } catch (FileNotFoundException ex) {
+            return Response.ok(new BoolResult(false)).status(Status.NOT_FOUND)
+                .build();
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
new file mode 100644
index 0000000..fdcd0f2
--- /dev/null
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.filebrowser;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.hadoop.fs.FSDataOutputStream;
+
+import com.sun.jersey.core.header.FormDataContentDisposition;
+import com.sun.jersey.multipart.FormDataParam;
+
+public class UploadService extends HdfsService {
+
+    public UploadService(ViewContext context) {
+        super(context);
+    }
+
+    private void uploadFile(final String filePath, InputStream uploadedInputStream)
+        throws IOException, Exception {
+        byte[] chunk = new byte[1024];
+        FSDataOutputStream out = getApi(context).create(filePath, false);
+        int read;
+        // write only the bytes actually read from the uploaded stream
+        while ((read = uploadedInputStream.read(chunk)) != -1) {
+            out.write(chunk, 0, read);
+        }
+        out.close();
+    }
+
+    @PUT
+    @Consumes(MediaType.MULTIPART_FORM_DATA)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response uploadFile(
+        @FormDataParam("file") InputStream uploadedInputStream,
+        @FormDataParam("file") FormDataContentDisposition contentDisposition,
+        @FormDataParam("path") String path) throws IOException, Exception {
+        if (!path.endsWith("/"))
+            path = path + "/";
+        String filePath = path + contentDisposition.getFileName();
+        uploadFile(filePath, uploadedInputStream);
+        return Response.ok(
+            HdfsApi.fileStatusToJSON(getApi(context).getFileStatus(filePath)))
+            .build();
+    }
+
+    @PUT
+    @Path("/zip")
+    @Consumes(MediaType.MULTIPART_FORM_DATA)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response uploadZip(
+        @FormDataParam("file") InputStream uploadedInputStream,
+        @FormDataParam("file") FormDataContentDisposition contentDisposition,
+        @FormDataParam("path") String path) throws IOException, Exception {
+        if (!path.endsWith("/"))
+            path = path + "/";
+        ZipInputStream zip = new ZipInputStream(uploadedInputStream);
+        ZipEntry ze = zip.getNextEntry();
+        HdfsApi api = getApi(context);
+        while (ze != null) {
+            String filePath = path + ze.getName();
+            if (ze.isDirectory()) {
+                api.mkdir(filePath);
+            } else {
+                uploadFile(filePath, zip);
+            }
+            ze = zip.getNextEntry();
+        }
+        return Response.ok(HdfsApi.fileStatusToJSON(api.listdir(path))).build();
+    }
+
+}
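
The upload endpoint above expects multipart form data; a rough client sketch using the Jersey 1.x client together with the jersey-multipart module already declared in the pom (placeholder base URL and file name, and it assumes the jersey-multipart providers are on the client classpath):

import java.io.File;

import javax.ws.rs.core.MediaType;

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.multipart.FormDataMultiPart;
import com.sun.jersey.multipart.file.FileDataBodyPart;

public class UploadClientSketch {

    public static void main(String[] args) {
        // Placeholder base URL; see the download sketch above.
        String base = "http://ambari.example.com:8080/api/v1/views/FILES"
            + "/versions/0.0.1/instances/files/resources/files";

        Client client = Client.create();
        WebResource upload = client.resource(base + "/upload");

        // Multipart form with the "file" and "path" parts expected by UploadService.
        FormDataMultiPart form = new FormDataMultiPart();
        form.field("path", "/user/admin/");
        form.bodyPart(new FileDataBodyPart("file", new File("report.csv")));

        // The service responds with the JSON file status of the uploaded file.
        String json = upload.type(MediaType.MULTIPART_FORM_DATA_TYPE).put(String.class, form);
        System.out.println(json);

        client.destroy();
    }
}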

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/resources/ui/.gitignore
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/.gitignore b/contrib/views/files/src/main/resources/ui/.gitignore
new file mode 100644
index 0000000..23e84db
--- /dev/null
+++ b/contrib/views/files/src/main/resources/ui/.gitignore
@@ -0,0 +1,34 @@
+# Numerous always-ignore extensions
+*.diff
+*.err
+*.orig
+*.log
+*.rej
+*.swo
+*.swp
+*.vi
+*~
+*.sass-cache
+
+# OS or Editor folders
+.DS_Store
+.cache
+.project
+.settings
+.tmproj
+nbproject
+Thumbs.db
+
+# NPM packages folder.
+node_modules/
+
+bower_components/
+
+node/
+
+# Brunch folder for temporary files.
+tmp/
+
+public/
+
+_generators/

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/resources/ui/app/adapter.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/adapter.js b/contrib/views/files/src/main/resources/ui/app/adapter.js
new file mode 100644
index 0000000..5805539
--- /dev/null
+++ b/contrib/views/files/src/main/resources/ui/app/adapter.js
@@ -0,0 +1,351 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+App = require('app');
+
+function promiseArray(promise, label) {
+  return Ember.ArrayProxy.extend(Ember.PromiseProxyMixin).create({
+    promise: Ember.RSVP.Promise.cast(promise, label)
+  });
+}
+
+
+function serializerForAdapter(adapter, type) {
+  var serializer = adapter.serializer,
+      defaultSerializer = adapter.defaultSerializer,
+      container = adapter.container;
+
+  if (container && serializer === undefined) {
+    serializer = serializerFor(container, type.typeKey, defaultSerializer);
+  }
+
+  if (serializer === null || serializer === undefined) {
+    serializer = {
+      extract: function(store, type, payload) { return payload; }
+    };
+  }
+
+  return serializer;
+}
+
+function serializerFor(container, type, defaultSerializer) {
+  return container.lookup('serializer:'+type) ||
+                 container.lookup('serializer:application') ||
+                 container.lookup('serializer:' + defaultSerializer) ||
+                 container.lookup('serializer:-default');
+}
+
+function _listdir(adapter, store, type, query, recordArray) {
+  var promise = adapter.listdir(store, type, query, recordArray),
+      serializer = serializerForAdapter(adapter, type),
+      label = "";
+
+  return Ember.RSVP.Promise.cast(promise, label).then(function(adapterPayload) {
+    var payload = serializer.extractArray(store, type, adapterPayload);
+
+    Ember.assert("The response from a findQuery must be an Array, not " + Ember.inspect(payload), Ember.typeOf(payload) === 'array');
+
+    recordArray.load(payload);
+    return recordArray;
+  }, null, "DS: Extract payload of findQuery " + type);
+}
+
+function _move(adapter, store, record, query) {
+  var type = store.modelFor('file'),
+      promise = adapter.move(store, type, record, query),
+      serializer = serializerForAdapter(adapter, type),
+      label = "";
+
+  return promise.then(function(adapterPayload) {
+    var payload;
+
+    if (adapterPayload) {
+      payload = serializer.extractSingle(store, type, adapterPayload);
+    } else {
+      payload = adapterPayload;
+    }
+
+    //TODO very shady activity :/
+    if (typeof record == 'object') {
+      store.unloadRecord(record);
+    }
+
+    return store.push('file', payload);
+  }, function(reason) {
+    if (reason instanceof DS.InvalidError) {
+      store.recordWasInvalid(record, reason.errors);
+    } else {
+      store.recordWasError(record, reason);
+    }
+
+    throw reason;
+  }, label);
+}
+
+function _mkdir(adapter, store, type, query) {
+  var promise = adapter.mkdir(store, type, query),
+      serializer = serializerForAdapter(adapter, type),
+      label = "";
+
+  return promise.then(function(adapterPayload) {
+    var payload;
+
+    if (adapterPayload) {
+      payload = serializer.extractSingle(store, type, adapterPayload);
+    } else {
+      payload = adapterPayload;
+    }
+
+    return store.push('file', payload);
+  }, function(reason) {
+    // mkdir has no client-side record to flag as invalid, so just propagate the failure
+    throw reason;
+  }, label);
+}
+
+function _remove(adapter, store, record, query) {
+  var type = record.constructor;
+  var promise = adapter.remove(store, type, query),
+      serializer = serializerForAdapter(adapter, type),
+      label = "";
+
+  return promise.then(function(adapterPayload) {
+    store.unloadRecord(record);
+    return record;
+  }, function(reason) {
+    if (reason instanceof DS.InvalidError) {
+      store.recordWasInvalid(record, reason.errors);
+    } else {
+      store.recordWasError(record, reason);
+    }
+
+    throw reason;
+  }, label);
+}
+
+Ember.Inflector.inflector.uncountable('fileops');
+Ember.Inflector.inflector.uncountable('download');
+Ember.Inflector.inflector.uncountable('upload');
+
+App.Store = DS.Store.extend({
+  adapter: DS.RESTAdapter.extend({
+    namespace:'api/v1/views/FILE_BROWSER/instances/FILEBROWSER_1/resources/filebrowser',
+    headers: {
+      'X-Requested-By': 'ambari'
+    },
+    listdir: function(store, type, query) {
+      return this.ajax(this.buildURL('fileops','listdir'), 'GET', { data: query });
+    },
+    move:function (store, type, record, query) {
+      return this.ajax(this.buildURL('fileops','rename'), 'POST', { data: query });
+    },
+    mkdir:function (store, type, query) {
+      return this.ajax(this.buildURL('fileops','mkdir'), 'PUT', { data: query });
+    },
+    remove:function (store, type, query) {
+      return this.ajax(this.buildURL('fileops','remove'), 'DELETE', { data: query });
+    },
+    downloadUrl:function (option, query) {
+      return [this.buildURL('download',option),Em.$.param(query)].join('?');
+    },
+    linkFor:function (option, query) {
+      return this.ajax(this.buildURL('download',[option,'generate-link'].join('/')), 'POST', { data: query });
+    }
+  }),
+  listdir:function (path) {
+    var query = {path: path};
+    var type = this.modelFor('file');
+    var array = this.recordArrayManager
+      .createAdapterPopulatedRecordArray(type, query);
+    this.recordArrayManager.registerFilteredRecordArray(array, type);
+
+    var adapter = this.adapterFor(type);
+
+    Ember.assert("You tried to load a query but you have no adapter (for " + type + ")", adapter);
+    Ember.assert("You tried to load a query but your adapter does not implement `listdir`", adapter.listdir);
+
+    return promiseArray(_listdir(adapter, this, type, query, array));
+  },
+  move:function (record, path) {
+    var oldpath;
+    if (typeof record === 'string') {
+      oldpath = record;
+    } else {
+      oldpath = record.get('id');
+    }
+    var query = {
+      "src":oldpath,
+      "dst":path
+    };
+    var promiseLabel = "DS: Model#move " + this;
+    var resolver = Ember.RSVP.defer(promiseLabel);
+    var adapter = this.adapterFor(record.constructor);
+
+    resolver.resolve(_move(adapter, this, record, query));
+
+    return DS.PromiseObject.create({ promise: resolver.promise });
+  },
+  mkdir:function (path) {
+    var query = {
+      "path":path
+    };
+    var type = this.modelFor('file');
+    var promiseLabel = "DS: Model#mkdir " + this;
+    var resolver = Ember.RSVP.defer(promiseLabel);
+    var adapter = this.adapterFor(type);
+
+    resolver.resolve(_mkdir(adapter, this, type, query));
+
+    return DS.PromiseObject.create({ promise: resolver.promise });
+  },
+  remove:function (record) {
+    var query = {
+      "path":record.get('path'),
+      "recursive":true
+    };
+    var type = this.modelFor('file');
+    var promiseLabel = "DS: Model#remove " + this;
+    var resolver = Ember.RSVP.defer(promiseLabel);
+    var adapter = this.adapterFor(type);
+    
+    record.deleteRecord();
+    resolver.resolve(_remove(adapter, this, record, query));
+
+    return DS.PromiseObject.create({ promise: resolver.promise });
+  },
+  /**
+   * Get a download link for the given records.
+   * @param  {Array} files      records to download
+   * @param  {String} option    browse, zip or concat
+   * @param  {Boolean} download
+   * @return {Promise}
+   */
+  linkFor:function (files, option, download) {
+    var resolver = Ember.RSVP.defer('promiseLabel');
+    var adapter = this.adapterFor(this.modelFor('file'));
+    var query;
+
+    // default download to true only when it was not passed at all
+    download = (download === undefined) ? true : download;
+    option = option || "browse";
+
+    if (option == 'browse') {
+      query = { "path": files.get('firstObject.path'), "download": download };
+      resolver.resolve(adapter.downloadUrl('browse', query));
+      return resolver.promise;
+    }
+
+    query = {
+      "entries": [],
+      "download": download
+    };
+
+    files.forEach(function (item) {
+      query.entries.push(item.get('path'));
+    });
+
+    resolver.resolve(adapter.linkFor(option, query));
+
+    return resolver.promise.then(function(response) {
+      return adapter.downloadUrl(option, response);
+    }, function(reason) {
+      //TODO reject
+      throw reason;
+    });
+  }
+});
+
+App.FileSerializer = DS.RESTSerializer.extend({
+  primaryKey:'path',
+  extractArray: function(store, type, payload, id, requestType) {
+    payload = {'files': payload};
+    return this._super(store, type, payload, id, requestType);
+  },
+  extractSingle: function(store, type, payload, id, requestType) {
+    payload = {'files': payload};
+    return this._super(store, type, payload, id, requestType);
+  }
+});
+
+App.Uploader = Ember.Uploader.create({
+  url: '',
+  type:'PUT',
+  upload: function(file,extraData) {
+    var data = this.setupFormData(file,extraData);
+    var url  = this.get('url');
+    var type = this.get('type');
+    var self = this;
+
+    this.set('isUploading', true);
+    
+    return this.ajax(url, data, type).then(function(respData) {
+      self.didUpload(respData);
+      return respData;
+    });
+  },
+  ajax: function(url, params, method) {
+    var self = this;
+    var settings = {
+      url: url,
+      type: method || 'POST',
+      contentType: false,
+      processData: false,
+      xhr: function() {
+        var xhr = Ember.$.ajaxSettings.xhr();
+        xhr.upload.onprogress = function(e) {
+          self.didProgress(e);
+        };
+        return xhr;
+      },
+      beforeSend:function (xhr) {
+        xhr.setRequestHeader('X-Requested-By', 'ambari');
+      },
+      data: params
+    };
+
+    return this._ajax(settings);
+  }
+});
+
+App.IsodateTransform = DS.Transform.extend({  
+  deserialize: function (serialized) {
+    if (serialized) {
+      return moment.utc(serialized).toDate();
+    }
+    return serialized;
+  },
+  serialize: function (deserialized) {
+    if (deserialized) {
+      return moment(deserialized).format('X');
+    }
+    return deserialized;
+  }
+});
+
+Ember.Handlebars.registerBoundHelper('showDate', function(date,format) {
+  return moment(date).format(format);
+});
+
+Ember.Handlebars.registerBoundHelper('showDateUnix', function(date,format) {
+  return moment.unix(date).format(format);
+});
+
+Ember.Handlebars.registerBoundHelper('capitalize', function(string) {
+  return string.capitalize();
+});
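
For orientation, the custom store methods defined above (listdir, mkdir, remove) might be
consumed from a route roughly as below. This is an illustrative sketch only; the view's
real routes and controllers live under app/routes and app/controllers elsewhere in this
commit, and the route name and 'path' parameter used here are assumptions.

    // Hypothetical route showing the store extensions in use.
    App.FilesRoute = Ember.Route.extend({
      model: function(params) {
        // listdir() returns a PromiseArray of 'file' records for one directory.
        return this.store.listdir(params.path || '/');
      },
      actions: {
        mkdir: function(newPath) {
          // resolves with the 'file' record pushed for the created directory
          this.store.mkdir(newPath);
        },
        removeFile: function(file) {
          // unloads the record and issues the DELETE against the fileops resource
          this.store.remove(file);
        }
      }
    });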

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f9f2a08/contrib/views/files/src/main/resources/ui/app/app.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/app.js b/contrib/views/files/src/main/resources/ui/app/app.js
new file mode 100644
index 0000000..7041224
--- /dev/null
+++ b/contrib/views/files/src/main/resources/ui/app/app.js
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+module.exports = Em.Application.create();