Posted to commits@ambari.apache.org by ja...@apache.org on 2014/05/12 20:48:17 UTC
git commit: AMBARI-5739. File View Cleanup. (jaimin)
Repository: ambari
Updated Branches:
refs/heads/branch-1.6.0 465ca755b -> ba392ad73
AMBARI-5739. File View Cleanup. (jaimin)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ba392ad7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ba392ad7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ba392ad7
Branch: refs/heads/branch-1.6.0
Commit: ba392ad73cf452ca72f9164083bad88e69c4710b
Parents: 465ca75
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Mon May 12 11:47:45 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Mon May 12 11:47:45 2014 -0700
----------------------------------------------------------------------
.../view/filebrowser/DownloadService.java | 397 +++++++++-------
.../view/filebrowser/FileBrowserService.java | 55 ++-
.../view/filebrowser/FileOperationService.java | 318 +++++++------
.../apache/ambari/view/filebrowser/HdfsApi.java | 460 +++++++++++--------
.../ambari/view/filebrowser/HdfsService.java | 54 ++-
.../ambari/view/filebrowser/HelpService.java | 144 +++---
.../ambari/view/filebrowser/UploadService.java | 122 +++--
.../view/filebrowser/FilebrowserTest.java | 233 +++++-----
8 files changed, 1031 insertions(+), 752 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba392ad7/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
index ca6ba66..2f4d1b9 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
@@ -52,207 +52,243 @@ import org.apache.ambari.view.ViewContext;
import org.json.simple.JSONObject;
//import org.glassfish.jersey.server.ChunkedOutput;
+/**
+ * Service for downloading and aggregating files
+ */
public class DownloadService extends HdfsService {
- public static class DownloadRequest {
- @XmlElement(nillable = false, required = true)
- public String[] entries;
- @XmlElement(required = false)
- public boolean download;
+ public DownloadService(ViewContext context) {
+ super(context);
+ }
+
+ /**
+ * Download entire file
+ * @param path path to file
+ * @param download download as octet stream or as file mime type
+ * @param headers http headers
+ * @param ui uri info
+ * @return response with file
+ */
+ @GET
+ @Path("/browse")
+ @Produces(MediaType.TEXT_PLAIN)
+ public Response browse(@QueryParam("path") String path, @QueryParam("download") boolean download,
+ @Context HttpHeaders headers, @Context UriInfo ui) {
+ try {
+ HdfsApi api = getApi(context);
+ FileStatus status = api.getFileStatus(path);
+ FSDataInputStream fs = api.open(path);
+ ResponseBuilder result = Response.ok(fs);
+ if (download) {
+ result.header("Content-Disposition",
+ "inline; filename=\"" + status.getPath().getName() + "\"").type(MediaType.APPLICATION_OCTET_STREAM);
+ } else {
+ FileNameMap fileNameMap = URLConnection.getFileNameMap();
+ String mimeType = fileNameMap.getContentTypeFor(status.getPath().getName());
+ result.header("Content-Disposition",
+ "filename=\"" + status.getPath().getName() + "\"").type(mimeType);
+ }
+ return result.build();
+ } catch (FileNotFoundException ex) {
+ return Response.ok(Response.Status.NOT_FOUND.getStatusCode())
+ .entity(ex.getMessage()).build();
+ } catch (Exception ex) {
+ return Response.ok(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode())
+ .entity(ex.getMessage()).build();
}
+ }
- public DownloadService(ViewContext context) {
- super(context);
+ private void zipFile(ZipOutputStream zip, String path)
+ throws InterruptedException, Exception {
+ try {
+ zip.putNextEntry(new ZipEntry(path.substring(1)));
+ FSDataInputStream in = getApi(context).open(path);
+ byte[] chunk = new byte[1024];
+ while (in.read(chunk) != -1) {
+ zip.write(chunk);
+ }
+ } catch (IOException ex) {
+ logger.error("Error zipping file " + path.substring(1) + ": "
+ + ex.getMessage());
+ zip.write(ex.getMessage().getBytes());
+ } finally {
+ zip.closeEntry();
}
- @GET
- @Path("/browse")
- @Produces(MediaType.TEXT_PLAIN)
- public Response browse(@QueryParam("path") String path, @QueryParam("download") boolean download,
- @Context HttpHeaders headers, @Context UriInfo ui) {
+ }
+
+ private void zipDirectory(ZipOutputStream zip, String path) {
+ try {
+ zip.putNextEntry(new ZipEntry(path.substring(1) + "/"));
+ zip.closeEntry();
+ } catch (IOException e) {
+ logger.error("Error zipping directory " + path.substring(1) + "/" + ": "
+ + e.getMessage());
+ }
+ }
+
+ /**
+ * Download ZIP of passed file list
+ * @param request download request
+ * @return response with zip
+ */
+ @POST
+ @Path("/zip")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_OCTET_STREAM)
+ public Response downloadGZip(final DownloadRequest request) {
+ StreamingOutput result = new StreamingOutput() {
+ public void write(OutputStream output) throws IOException,
+ WebApplicationException {
+ ZipOutputStream zip = new ZipOutputStream(output);
try {
- HdfsApi api = getApi(context);
+ HdfsApi api = getApi(context);
+ Queue<String> files = new LinkedList<String>();
+ for (String file : request.entries) {
+ files.add(file);
+ }
+ while (!files.isEmpty()) {
+ String path = files.poll();
FileStatus status = api.getFileStatus(path);
- FSDataInputStream fs = api.open(path);
- ResponseBuilder result = Response.ok(fs);
- if (download) {
- result.header("Content-Disposition",
- "inline; filename=\"" + status.getPath().getName() + "\"").type(MediaType.APPLICATION_OCTET_STREAM);
+ if (status.isDirectory()) {
+ FileStatus[] subdir = api.listdir(path);
+ for (FileStatus file : subdir) {
+ files.add(org.apache.hadoop.fs.Path
+ .getPathWithoutSchemeAndAuthority(file.getPath())
+ .toString());
+ }
+ zipDirectory(zip, path);
} else {
- FileNameMap fileNameMap = URLConnection.getFileNameMap();
- String mimeType = fileNameMap.getContentTypeFor(status.getPath().getName());
- result.header("Content-Disposition",
- "filename=\"" + status.getPath().getName() + "\"").type(mimeType);
+ zipFile(zip, path);
}
- return result.build();
- } catch (FileNotFoundException ex) {
- return Response.ok(Response.Status.NOT_FOUND.getStatusCode())
- .entity(ex.getMessage()).build();
+ }
} catch (Exception ex) {
- return Response.ok(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode())
- .entity(ex.getMessage()).build();
+ logger.error("Error occured: " + ex.getMessage());
+ } finally {
+ zip.close();
}
- }
+ }
+ };
+ return Response.ok(result)
+ .header("Content-Disposition", "inline; filename=\"hdfs.zip\"").build();
+ }
- private void zipFile(ZipOutputStream zip, String path)
- throws InterruptedException, Exception {
- try {
- zip.putNextEntry(new ZipEntry(path.substring(1)));
- FSDataInputStream in = getApi(context).open(path);
+ /**
+ * Concatenate files
+ * @param request download request
+ * @return response with all files concatenated
+ */
+ @POST
+ @Path("/concat")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_OCTET_STREAM)
+ public Response concat(final DownloadRequest request) {
+ StreamingOutput result = new StreamingOutput() {
+ public void write(OutputStream output) throws IOException,
+ WebApplicationException {
+ FSDataInputStream in = null;
+ for (String path : request.entries) {
+ try {
+ in = getApi(context).open(path);
byte[] chunk = new byte[1024];
while (in.read(chunk) != -1) {
- zip.write(chunk);
+ output.write(chunk);
}
- } catch (IOException ex) {
- logger.error("Error zipping file " + path.substring(1) + ": "
- + ex.getMessage());
- zip.write(ex.getMessage().getBytes());
- } finally {
- zip.closeEntry();
+ } catch (Exception ex) {
+ ex.printStackTrace();
+ } finally {
+ if (in != null)
+ in.close();
+ }
}
-
+ }
+ };
+ ResponseBuilder response = Response.ok(result);
+ if (request.download){
+ response.header("Content-Disposition", "inline; filename=\"concatResult.txt\"").type(MediaType.APPLICATION_OCTET_STREAM);
+ } else {
+ response.header("Content-Disposition", "filename=\"concatResult.txt\"").type(MediaType.TEXT_PLAIN);
}
+ return response.build();
+ }
- private void zipDirectory(ZipOutputStream zip, String path) {
- try {
- zip.putNextEntry(new ZipEntry(path.substring(1) + "/"));
- zip.closeEntry();
- } catch (IOException e) {
- logger.error("Error zipping directory " + path.substring(1) + "/" + ": "
- + e.getMessage());
- }
- }
+ // ===============================
+ // Download files by unique link
- @POST
- @Path("/zip")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_OCTET_STREAM)
- public Response downloadGZip(final DownloadRequest request) {
- StreamingOutput result = new StreamingOutput() {
- public void write(OutputStream output) throws IOException,
- WebApplicationException {
- ZipOutputStream zip = new ZipOutputStream(output);
- try {
- HdfsApi api = getApi(context);
- Queue<String> files = new LinkedList<String>();
- for (String file : request.entries) {
- files.add(file);
- }
- while (!files.isEmpty()) {
- String path = files.poll();
- FileStatus status = api.getFileStatus(path);
- if (status.isDirectory()) {
- FileStatus[] subdir = api.listdir(path);
- for (FileStatus file : subdir) {
- files.add(org.apache.hadoop.fs.Path
- .getPathWithoutSchemeAndAuthority(file.getPath())
- .toString());
- }
- zipDirectory(zip, path);
- } else {
- zipFile(zip, path);
- }
- }
- } catch (Exception ex) {
- logger.error("Error occured: " + ex.getMessage());
- } finally {
- zip.close();
- }
- }
- };
- return Response.ok(result)
- .header("Content-Disposition", "inline; filename=\"hdfs.zip\"").build();
- }
+ /**
+ * Download zip by unique link
+ * @param requestId id of request
+ * @return response with zip
+ */
+ @GET
+ @Path("/zip")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_OCTET_STREAM)
+ public Response zipByRequestId(@QueryParam("requestId") String requestId) {
+ String json = context.getInstanceData(requestId);
+ DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
+ context.removeInstanceData(requestId);
+ return downloadGZip(request);
+ }
- @POST
- @Path("/concat")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_OCTET_STREAM)
- public Response concat(final DownloadRequest request) {
- StreamingOutput result = new StreamingOutput() {
- public void write(OutputStream output) throws IOException,
- WebApplicationException {
- FSDataInputStream in = null;
- for (String path : request.entries) {
- try {
- in = getApi(context).open(path);
- byte[] chunk = new byte[1024];
- while (in.read(chunk) != -1) {
- output.write(chunk);
- }
- } catch (Exception ex) {
- ex.printStackTrace();
- } finally {
- if (in != null)
- in.close();
- }
- }
- }
- };
- ResponseBuilder response = Response.ok(result);
- if (request.download){
- response.header("Content-Disposition", "inline; filename=\"concatResult.txt\"").type(MediaType.APPLICATION_OCTET_STREAM);
- } else {
- response.header("Content-Disposition", "filename=\"concatResult.txt\"").type(MediaType.TEXT_PLAIN);
- }
- return response.build();
- }
+ /**
+ * Generate link for zip
+ * @param request download request
+ * @return response with request id
+ * @see #zipByRequestId(String)
+ */
+ @POST
+ @Path("/zip/generate-link")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response zipGenerateLink(final DownloadRequest request) {
+ String requestId = generateUniqueIdentifer(request);
+ JSONObject json = new JSONObject();
+ json.put("requestId", requestId);
+ return Response.ok(json).build();
+ }
- // ===============================
- // Download files by unique link
+ /**
+ * Concatenate files by unique link
+ * @param requestId id of request
+ * @return response with concatenated files
+ */
+ @GET
+ @Path("/concat")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_OCTET_STREAM)
+ public Response concatByRequestId(@QueryParam("requestId") String requestId) {
+ String json = context.getInstanceData(requestId);
+ DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
+ context.removeInstanceData(requestId);
+ return concat(request);
+ }
- @GET
- @Path("/zip")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_OCTET_STREAM)
- public Response zipByRequestId(@QueryParam("requestId") String requestId) {
- String json = context.getInstanceData(requestId);
- DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
- context.removeInstanceData(requestId);
- return downloadGZip(request);
- }
+ /**
+ * Generate link for concat
+ * @param request download request
+ * @return response with request id
+ * @see #concatByRequestId(String)
+ */
+ @POST
+ @Path("/concat/generate-link")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response concatGenerateLink(final DownloadRequest request) {
+ String requestId = generateUniqueIdentifer(request);
+ JSONObject json = new JSONObject();
+ json.put("requestId", requestId);
+ return Response.ok(json).build();
+ }
- @POST
- @Path("/zip/generate-link")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_JSON)
- public Response zipGenerateLink(final DownloadRequest request) {
- String requestId = generateUniqueIdentifer(request);
- JSONObject json = new JSONObject();
- json.put("requestId", requestId);
- return Response.ok(json).build();
- }
+ private Gson gson = new Gson();
- @GET
- @Path("/concat")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_OCTET_STREAM)
- public Response concatByRequestId(@QueryParam("requestId") String requestId) {
- String json = context.getInstanceData(requestId);
- DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
- context.removeInstanceData(requestId);
- return concat(request);
- }
-
- @POST
- @Path("/concat/generate-link")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_JSON)
- public Response concatGenerateLink(final DownloadRequest request) {
- String requestId = generateUniqueIdentifer(request);
- JSONObject json = new JSONObject();
- json.put("requestId", requestId);
- return Response.ok(json).build();
- }
-
- private Gson gson = new Gson();
-
- private String generateUniqueIdentifer(DownloadRequest request) {
- String uuid = UUID.randomUUID().toString().replaceAll("-", "");
- String json = gson.toJson(request);
- context.putInstanceData(uuid, json);
- return uuid;
- }
+ private String generateUniqueIdentifer(DownloadRequest request) {
+ String uuid = UUID.randomUUID().toString().replaceAll("-", "");
+ String json = gson.toJson(request);
+ context.putInstanceData(uuid, json);
+ return uuid;
+ }
/*
* Temporary use Stream Output
@@ -279,4 +315,13 @@ public class DownloadService extends HdfsService {
* return output; }
*/
+ /**
+ * Wrapper for json mapping of download request
+ */
+ public static class DownloadRequest {
+ @XmlElement(nillable = false, required = true)
+ public String[] entries;
+ @XmlElement(required = false)
+ public boolean download;
+ }
}
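The unique-link endpoints above form a two-step flow: a client POSTs the entries to /zip/generate-link, gets back a requestId, then fetches GET /zip?requestId=...; zipByRequestId reads the stored request with getInstanceData and removes it, so each link is single-use. A minimal client-side sketch, assuming a deployment at the test's BASE_URI (http://localhost:8084/myapp/) and plain JDK HTTP:

    import java.io.BufferedReader;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ZipLinkClient {
      public static void main(String[] args) throws Exception {
        String base = "http://localhost:8084/myapp/download"; // hypothetical view root
        // Step 1: POST the download request, read back {"requestId":"<uuid>"}.
        HttpURLConnection gen = (HttpURLConnection) new URL(base + "/zip/generate-link").openConnection();
        gen.setRequestMethod("POST");
        gen.setDoOutput(true);
        gen.setRequestProperty("Content-Type", "application/json");
        OutputStream out = gen.getOutputStream();
        out.write("{\"entries\":[\"/tmp\"],\"download\":true}".getBytes("UTF-8"));
        out.close();
        BufferedReader in = new BufferedReader(new InputStreamReader(gen.getInputStream(), "UTF-8"));
        String json = in.readLine();
        in.close();
        String requestId = json.replaceAll(".*\"requestId\":\"([^\"]+)\".*", "$1");
        // Step 2: GET /zip?requestId=... streams back hdfs.zip; the stored request
        // is deleted by zipByRequestId, so a second GET with the same id fails.
        HttpURLConnection dl = (HttpURLConnection) new URL(base + "/zip?requestId=" + requestId).openConnection();
        InputStream zip = dl.getInputStream(); // copy to a local file as needed
        zip.close();
      }
    }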
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba392ad7/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
index d45d680..9224331 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java
@@ -24,29 +24,48 @@ import org.apache.ambari.view.ViewContext;
import com.google.inject.Inject;
+/**
+ * Root files service
+ */
public class FileBrowserService {
- @Inject
- ViewContext context;
+ @Inject
+ ViewContext context;
- @Path("/download")
- public DownloadService download() {
- return new DownloadService(context);
- }
+ /**
+ * @see org.apache.ambari.view.filebrowser.DownloadService
+ * @return service
+ */
+ @Path("/download")
+ public DownloadService download() {
+ return new DownloadService(context);
+ }
- @Path("/upload")
- public UploadService upload() {
- return new UploadService(context);
- }
+ /**
+ * @see org.apache.ambari.view.filebrowser.UploadService
+ * @return service
+ */
+ @Path("/upload")
+ public UploadService upload() {
+ return new UploadService(context);
+ }
- @Path("/fileops")
- public FileOperationService fileOps() {
- return new FileOperationService(context);
- }
+ /**
+ * @see org.apache.ambari.view.filebrowser.FileOperationService
+ * @return service
+ */
+ @Path("/fileops")
+ public FileOperationService fileOps() {
+ return new FileOperationService(context);
+ }
- @Path("/help")
- public HelpService help() {
- return new HelpService(context);
- }
+ /**
+ * @see org.apache.ambari.view.filebrowser.HelpService
+ * @return service
+ */
+ @Path("/help")
+ public HelpService help() {
+ return new HelpService(context);
+ }
}
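FileBrowserService is a plain JAX-RS sub-resource locator: each @Path method hands the request off to the matching service, so GET {root}/help/version ends up in HelpService.version(). A Jersey 1.x client sketch against such an instance, with the base URL borrowed from the test's BASE_URI (illustrative only):

    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.WebResource;

    public class HelpVersionClient {
      public static void main(String[] args) {
        Client client = Client.create();
        // Hypothetical deployment root; routing goes FileBrowserService.help() -> HelpService.version().
        WebResource root = client.resource("http://localhost:8084/myapp");
        String version = root.path("/help/version").get(String.class);
        System.out.println(version); // "0.0.1-SNAPSHOT"
      }
    }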
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba392ad7/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
index ce1f675..c8bd101 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java
@@ -40,142 +40,200 @@ import javax.xml.bind.annotation.XmlRootElement;
import org.apache.ambari.view.ViewContext;
+/**
+ * File operations service
+ */
public class FileOperationService extends HdfsService {
- public FileOperationService(ViewContext context) {
- super(context);
+ /**
+ * Constructor
+ * @param context View Context instance
+ */
+ public FileOperationService(ViewContext context) {
+ super(context);
+ }
+
+ /**
+ * List dir
+ * @param path path
+ * @return response with dir content
+ * @throws Exception
+ */
+ @GET
+ @Path("/listdir")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response listdir(@QueryParam("path") String path) throws Exception {
+ try {
+ return Response.ok(
+ HdfsApi.fileStatusToJSON(getApi(context).listdir(path))).build();
+ } catch (FileNotFoundException ex) {
+ return Response.ok(Response.Status.NOT_FOUND.getStatusCode())
+ .entity(ex.getMessage()).build();
+ } catch (Throwable ex) {
+ throw new Exception(ex.getMessage());
}
-
- @XmlRootElement
- public static class MkdirRequest {
- @XmlElement(nillable = false, required = true)
- public String path;
- }
-
-
- @XmlRootElement
- public static class SrcDstFileRequest {
- @XmlElement(nillable = false, required = true)
- public String src;
- @XmlElement(nillable = false, required = true)
- public String dst;
+ }
+
+ /**
+ * Rename
+ * @param request rename request
+ * @return response with success
+ * @throws IOException
+ * @throws Exception
+ */
+ @POST
+ @Path("/rename")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response rename(final SrcDstFileRequest request) throws IOException,
+ Exception {
+ HdfsApi api = getApi(context);
+ ResponseBuilder result;
+ if (api.rename(request.src, request.dst)) {
+ result = Response.ok(HdfsApi.fileStatusToJSON(api
+ .getFileStatus(request.dst)));
+ } else {
+ result = Response.ok(new BoolResult(false)).status(422);
}
-
- @XmlRootElement
- public static class RemoveRequest {
- @XmlElement(nillable = false, required = true)
- public String path;
- public boolean recursive;
+ return result.build();
+ }
+
+ /**
+ * Copy file
+ * @param request source and destination request
+ * @return response with success
+ * @throws IOException
+ * @throws Exception
+ */
+ @POST
+ @Path("/copy")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response copy(final SrcDstFileRequest request,
+ @Context HttpHeaders headers, @Context UriInfo ui) throws IOException,
+ Exception {
+ HdfsApi api = getApi(context);
+ ResponseBuilder result;
+ if (api.copy(request.src, request.dst)) {
+ result = Response.ok(HdfsApi.fileStatusToJSON(api
+ .getFileStatus(request.dst)));
+ } else {
+ result = Response.ok(new BoolResult(false)).status(422);
}
-
- @GET
- @Path("/listdir")
- @Produces(MediaType.APPLICATION_JSON)
- public Response listdir(@QueryParam("path") String path,
- @Context HttpHeaders headers, @Context UriInfo ui) throws Exception {
- try {
- return Response.ok(
- HdfsApi.fileStatusToJSON(getApi(context).listdir(path))).build();
- } catch (FileNotFoundException ex) {
- return Response.ok(Response.Status.NOT_FOUND.getStatusCode())
- .entity(ex.getMessage()).build();
- } catch (Throwable ex) {
- throw new Exception(ex.getMessage());
- }
+ return result.build();
+ }
+
+ /**
+ * Make directory
+ * @param request make directory request
+ * @return response with success
+ * @throws IOException
+ * @throws Exception
+ */
+ @PUT
+ @Path("/mkdir")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response mkdir(final MkdirRequest request) throws IOException,
+ Exception {
+ HdfsApi api = getApi(context);
+ ResponseBuilder result;
+ if (api.mkdir(request.path)) {
+ result = Response.ok(HdfsApi.fileStatusToJSON(api.getFileStatus(request.path)));
+ } else {
+ result = Response.ok(new BoolResult(false)).status(422);
}
-
- @POST
- @Path("/rename")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_JSON)
- public Response rename(final SrcDstFileRequest request,
- @Context HttpHeaders headers, @Context UriInfo ui) throws IOException,
- Exception {
- HdfsApi api = getApi(context);
- ResponseBuilder result;
- if (api.rename(request.src, request.dst)) {
- result = Response.ok(HdfsApi.fileStatusToJSON(api
- .getFileStatus(request.dst)));
- } else {
- result = Response.ok(new BoolResult(false)).status(422);
- }
- return result.build();
+ return result.build();
+ }
+
+ /**
+ * Empty trash
+ * @return response with success
+ * @throws IOException
+ * @throws Exception
+ */
+ @DELETE
+ @Path("/trash/emptyTrash")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response emptyTrash() throws IOException, Exception {
+ HdfsApi api = getApi(context);
+ api.emptyTrash();
+ return Response.ok(new BoolResult(true)).build();
+ }
+
+ /**
+ * Move to trash
+ * @param request remove request
+ * @return response with success
+ * @throws IOException
+ * @throws Exception
+ */
+ @DELETE
+ @Path("/moveToTrash")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response moveToTrash(RemoveRequest request) throws IOException, Exception {
+ HdfsApi api = getApi(context);
+ ResponseBuilder result;
+ if (api.moveToTrash(request.path)){
+ result = Response.ok(new BoolResult(true)).status(204);
+ } else {
+ result = Response.ok(new BoolResult(false)).status(422);
}
-
- @POST
- @Path("/copy")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_JSON)
- public Response copy(final SrcDstFileRequest request,
- @Context HttpHeaders headers, @Context UriInfo ui) throws IOException,
- Exception {
- HdfsApi api = getApi(context);
- ResponseBuilder result;
- if (api.copy(request.src, request.dst)) {
- result = Response.ok(HdfsApi.fileStatusToJSON(api
- .getFileStatus(request.dst)));
- } else {
- result = Response.ok(new BoolResult(false)).status(422);
- }
- return result.build();
+ return result.build();
+ }
+
+ /**
+ * Remove
+ * @param request remove request
+ * @return response with success
+ * @throws IOException
+ * @throws Exception
+ */
+ @DELETE
+ @Path("/remove")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response remove(RemoveRequest request, @Context HttpHeaders headers,
+ @Context UriInfo ui) throws IOException, Exception {
+ HdfsApi api = getApi(context);
+ ResponseBuilder result;
+ if (api.delete(request.path, request.recursive)){
+ result = Response.ok(new BoolResult(true)).status(204);
+ } else {
+ result = Response.ok(new BoolResult(false)).status(422);
}
-
- @PUT
- @Path("/mkdir")
- @Produces(MediaType.APPLICATION_JSON)
- public Response mkdir(final MkdirRequest request,
- @Context HttpHeaders headers, @Context UriInfo ui) throws IOException,
- Exception {
- HdfsApi api = getApi(context);
- ResponseBuilder result;
- if (api.mkdir(request.path)) {
- result = Response.ok(HdfsApi.fileStatusToJSON(api.getFileStatus(request.path)));
- } else {
- result = Response.ok(new BoolResult(false)).status(422);
- }
- return result.build();
- }
-
- @DELETE
- @Path("/trash/emptyTrash")
- @Produces(MediaType.APPLICATION_JSON)
- public Response emptyTrash(@Context HttpHeaders headers,
- @Context UriInfo ui) throws IOException, Exception {
- HdfsApi api = getApi(context);
- api.emptyTrash();
- return Response.ok(new BoolResult(true)).build();
- }
-
- @DELETE
- @Path("/moveToTrash")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_JSON)
- public Response moveToTrash(RemoveRequest request, @Context HttpHeaders headers,
- @Context UriInfo ui) throws IOException, Exception {
- HdfsApi api = getApi(context);
- ResponseBuilder result;
- if (api.moveToTrash(request.path)){
- result = Response.ok(new BoolResult(true)).status(204);
- } else {
- result = Response.ok(new BoolResult(false)).status(422);
- }
- return result.build();
- }
-
- @DELETE
- @Path("/remove")
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_JSON)
- public Response remove(RemoveRequest request, @Context HttpHeaders headers,
- @Context UriInfo ui) throws IOException, Exception {
- HdfsApi api = getApi(context);
- ResponseBuilder result;
- if (api.delete(request.path, request.recursive)){
- result = Response.ok(new BoolResult(true)).status(204);
- } else {
- result = Response.ok(new BoolResult(false)).status(422);
- }
- return result.build();
- }
-
+ return result.build();
+ }
+
+ /**
+ * Wrapper for json mapping of mkdir request
+ */
+ @XmlRootElement
+ public static class MkdirRequest {
+ @XmlElement(nillable = false, required = true)
+ public String path;
+ }
+
+
+ /**
+ * Wrapper for json mapping of request with
+ * source and destination
+ */
+ @XmlRootElement
+ public static class SrcDstFileRequest {
+ @XmlElement(nillable = false, required = true)
+ public String src;
+ @XmlElement(nillable = false, required = true)
+ public String dst;
+ }
+
+ /**
+ * Wrapper for json mapping of remove request
+ */
+ @XmlRootElement
+ public static class RemoveRequest {
+ @XmlElement(nillable = false, required = true)
+ public String path;
+ public boolean recursive;
+ }
}
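The request wrappers above define the JSON bodies these endpoints consume. A short sketch of the wire format using Gson (the same mapper DownloadService uses); the paths are illustrative:

    import com.google.gson.Gson;
    import org.apache.ambari.view.filebrowser.FileOperationService.MkdirRequest;
    import org.apache.ambari.view.filebrowser.FileOperationService.SrcDstFileRequest;

    public class RequestJsonDemo {
      public static void main(String[] args) {
        Gson gson = new Gson();
        MkdirRequest mkdir = new MkdirRequest();
        mkdir.path = "/tmp/newdir";
        // Body for PUT /fileops/mkdir
        System.out.println(gson.toJson(mkdir)); // {"path":"/tmp/newdir"}
        SrcDstFileRequest move = new SrcDstFileRequest();
        move.src = "/tmp/newdir";
        move.dst = "/tmp/renamed";
        // Body for POST /fileops/rename and POST /fileops/copy
        System.out.println(gson.toJson(move)); // {"src":"/tmp/newdir","dst":"/tmp/renamed"}
      }
    }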
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba392ad7/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
index b5d29c4..79ea0d9 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
@@ -33,205 +33,301 @@ import org.json.simple.JSONArray;
import java.util.LinkedHashMap;
+/**
+ * Hdfs Business Delegate
+ */
public class HdfsApi {
- private final Configuration conf = new Configuration();
-
- private FileSystem fs;
- private UserGroupInformation ugi;
-
- public HdfsApi(String defaultFs, String username) throws IOException,
- InterruptedException {
- conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
- conf.set("fs.webhdfs.impl", "org.apache.hadoop.hdfs.web.WebHdfsFileSystem");
- conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
- fs = FileSystem.get(URI.create(defaultFs), conf, username);
- ugi = UserGroupInformation.createProxyUser(username,
- UserGroupInformation.getLoginUser());
- }
+ private final Configuration conf = new Configuration();
- public FileStatus[] listdir(final String path) throws FileNotFoundException,
- IOException, InterruptedException {
- return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
- public FileStatus[] run() throws FileNotFoundException, Exception {
- return fs.listStatus(new Path(path));
- }
- });
- }
+ private FileSystem fs;
+ private UserGroupInformation ugi;
- public FileStatus getFileStatus(final String path) throws IOException,
- FileNotFoundException, InterruptedException {
- return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
- public FileStatus run() throws FileNotFoundException, IOException {
- return fs.getFileStatus(new Path(path));
- }
- });
- }
+ /**
+ * Constructor
+ * @param defaultFs hdfs uri
+ * @param username user.name
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public HdfsApi(String defaultFs, String username) throws IOException,
+ InterruptedException {
+ conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
+ conf.set("fs.webhdfs.impl", "org.apache.hadoop.hdfs.web.WebHdfsFileSystem");
+ conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
+ fs = FileSystem.get(URI.create(defaultFs), conf, username);
+ ugi = UserGroupInformation.createProxyUser(username,
+ UserGroupInformation.getLoginUser());
+ }
- public boolean mkdir(final String path) throws IOException,
- InterruptedException {
- return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
- public Boolean run() throws Exception {
- return fs.mkdirs(new Path(path));
- }
- });
- }
+ /**
+ * List dir operation
+ * @param path path
+ * @return array of FileStatus objects
+ * @throws FileNotFoundException
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public FileStatus[] listdir(final String path) throws FileNotFoundException,
+ IOException, InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
+ public FileStatus[] run() throws FileNotFoundException, Exception {
+ return fs.listStatus(new Path(path));
+ }
+ });
+ }
- public boolean rename(final String src, final String dst) throws IOException,
- InterruptedException {
- return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
- public Boolean run() throws Exception {
- return fs.rename(new Path(src), new Path(dst));
- }
- });
- }
+ /**
+ * Get file status
+ * @param path path
+ * @return file status
+ * @throws IOException
+ * @throws FileNotFoundException
+ * @throws InterruptedException
+ */
+ public FileStatus getFileStatus(final String path) throws IOException,
+ FileNotFoundException, InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
+ public FileStatus run() throws FileNotFoundException, IOException {
+ return fs.getFileStatus(new Path(path));
+ }
+ });
+ }
- public boolean trashEnabled() throws Exception {
- return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
- public Boolean run() throws IOException {
- Trash tr = new Trash(fs, conf);
- return tr.isEnabled();
- }
- });
- }
+ /**
+ * Make directory
+ * @param path path
+ * @return success
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public boolean mkdir(final String path) throws IOException,
+ InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+ public Boolean run() throws Exception {
+ return fs.mkdirs(new Path(path));
+ }
+ });
+ }
- public Path getHomeDir() throws Exception {
- return ugi.doAs(new PrivilegedExceptionAction<Path>() {
- public Path run() throws IOException {
- return fs.getHomeDirectory();
- }
- });
- }
+ /**
+ * Rename
+ * @param src source path
+ * @param dst destination path
+ * @return success
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public boolean rename(final String src, final String dst) throws IOException,
+ InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+ public Boolean run() throws Exception {
+ return fs.rename(new Path(src), new Path(dst));
+ }
+ });
+ }
- public Path getTrashDir() throws Exception {
- return ugi.doAs(new PrivilegedExceptionAction<Path>() {
- public Path run() throws IOException {
- TrashPolicy trashPolicy = TrashPolicy.getInstance(conf, fs,
- fs.getHomeDirectory());
- return trashPolicy.getCurrentTrashDir().getParent();
- }
- });
- }
+ /**
+ * Check whether trash is enabled
+ * @return true if trash is enabled
+ * @throws Exception
+ */
+ public boolean trashEnabled() throws Exception {
+ return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+ public Boolean run() throws IOException {
+ Trash tr = new Trash(fs, conf);
+ return tr.isEnabled();
+ }
+ });
+ }
- public Void emptyTrash() throws Exception {
- return ugi.doAs(new PrivilegedExceptionAction<Void>() {
- public Void run() throws IOException {
- Trash tr = new Trash(fs, conf);
- tr.expunge();
- return null;
- }
- });
- }
+ /**
+ * Home directory
+ * @return home directory
+ * @throws Exception
+ */
+ public Path getHomeDir() throws Exception {
+ return ugi.doAs(new PrivilegedExceptionAction<Path>() {
+ public Path run() throws IOException {
+ return fs.getHomeDirectory();
+ }
+ });
+ }
- public boolean moveToTrash(final String path) throws IOException,
- InterruptedException {
- return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
- public Boolean run() throws Exception {
- return Trash.moveToAppropriateTrash(fs, new Path(path), conf);
- }
- });
- }
+ /**
+ * Trash directory
+ * @return trash directory
+ * @throws Exception
+ */
+ public Path getTrashDir() throws Exception {
+ return ugi.doAs(new PrivilegedExceptionAction<Path>() {
+ public Path run() throws IOException {
+ TrashPolicy trashPolicy = TrashPolicy.getInstance(conf, fs,
+ fs.getHomeDirectory());
+ return trashPolicy.getCurrentTrashDir().getParent();
+ }
+ });
+ }
- public boolean delete(final String path, final boolean recursive)
- throws IOException, InterruptedException {
- return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
- public Boolean run() throws Exception {
- return fs.delete(new Path(path), recursive);
- }
- });
- }
+ /**
+ * Empty trash
+ * @return
+ * @throws Exception
+ */
+ public Void emptyTrash() throws Exception {
+ return ugi.doAs(new PrivilegedExceptionAction<Void>() {
+ public Void run() throws IOException {
+ Trash tr = new Trash(fs, conf);
+ tr.expunge();
+ return null;
+ }
+ });
+ }
- public FSDataOutputStream create(final String path, final boolean overwrite)
- throws IOException, InterruptedException {
- return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
- public FSDataOutputStream run() throws Exception {
- return fs.create(new Path(path), overwrite);
- }
- });
- }
+ /**
+ * Move to trash
+ * @param path path
+ * @return success
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public boolean moveToTrash(final String path) throws IOException,
+ InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+ public Boolean run() throws Exception {
+ return Trash.moveToAppropriateTrash(fs, new Path(path), conf);
+ }
+ });
+ }
- public FSDataInputStream open(final String path) throws IOException,
- InterruptedException {
- return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
- public FSDataInputStream run() throws Exception {
- return fs.open(new Path(path));
- }
- });
- }
+ /**
+ * Delete
+ * @param path path
+ * @param recursive delete recursive
+ * @return success
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public boolean delete(final String path, final boolean recursive)
+ throws IOException, InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+ public Boolean run() throws Exception {
+ return fs.delete(new Path(path), recursive);
+ }
+ });
+ }
- public boolean copy(final String src, final String dest) throws IOException,
- InterruptedException {
- return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
- public Boolean run() throws Exception {
- return FileUtil
- .copy(fs, new Path(src), fs, new Path(dest), false, conf);
- }
- });
- }
+ /**
+ * Create file
+ * @param path path
+ * @param overwrite overwrite existent file
+ * @return output stream
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public FSDataOutputStream create(final String path, final boolean overwrite)
+ throws IOException, InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
+ public FSDataOutputStream run() throws Exception {
+ return fs.create(new Path(path), overwrite);
+ }
+ });
+ }
- /**
- * Converts a Hadoop permission into a Unix permission symbolic representation
- * (i.e. -rwxr--r--) or default if the permission is NULL.
- *
- * @param p
- * Hadoop permission.
- * @return the Unix permission symbolic representation or default if the
- * permission is NULL.
- */
- private static String permissionToString(FsPermission p) {
- return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
- + p.getGroupAction().SYMBOL + p.getOtherAction().SYMBOL;
- }
+ /**
+ * Open file
+ * @param path path
+ * @return input stream
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public FSDataInputStream open(final String path) throws IOException,
+ InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
+ public FSDataInputStream run() throws Exception {
+ return fs.open(new Path(path));
+ }
+ });
+ }
- /**
- * Converts a Hadoop <code>FileStatus</code> object into a JSON array object.
- * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
- * specified URL.
- * <p/>
- *
- * @param status
- * Hadoop file status.
- * @param hoopBaseUrl
- * base URL to replace the <code>SCHEME://HOST:PORT</code> in the
- * file status.
- * @return The JSON representation of the file status.
- */
-
- public static Map<String, Object> fileStatusToJSON(FileStatus status) {
- Map<String, Object> json = new LinkedHashMap<String, Object>();
- json.put("path", Path.getPathWithoutSchemeAndAuthority(status.getPath())
- .toString());
- json.put("replication", status.getReplication());
- json.put("isDirectory", status.isDirectory());
- json.put("len", status.getLen());
- json.put("owner", status.getOwner());
- json.put("group", status.getGroup());
- json.put("permission", permissionToString(status.getPermission()));
- json.put("accessTime", status.getAccessTime());
- json.put("modificationTime", status.getModificationTime());
- json.put("blockSize", status.getBlockSize());
- json.put("replication", status.getReplication());
- return json;
- }
+ /**
+ * Copy file
+ * @param src source path
+ * @param dest destination path
+ * @return success
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public boolean copy(final String src, final String dest) throws IOException,
+ InterruptedException {
+ return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+ public Boolean run() throws Exception {
+ return FileUtil
+ .copy(fs, new Path(src), fs, new Path(dest), false, conf);
+ }
+ });
+ }
+
+ /**
+ * Converts a Hadoop permission into a Unix permission symbolic representation
+ * (i.e. -rwxr--r--) or default if the permission is NULL.
+ *
+ * @param p
+ * Hadoop permission.
+ * @return the Unix permission symbolic representation or default if the
+ * permission is NULL.
+ */
+ private static String permissionToString(FsPermission p) {
+ return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
+ + p.getGroupAction().SYMBOL + p.getOtherAction().SYMBOL;
+ }
+
+ /**
+ * Converts a Hadoop <code>FileStatus</code> object into a JSON array object.
+ * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
+ * specified URL.
+ * <p/>
+ *
+ * @param status
+ * Hadoop file status.
+ * @return The JSON representation of the file status.
+ */
+
+ public static Map<String, Object> fileStatusToJSON(FileStatus status) {
+ Map<String, Object> json = new LinkedHashMap<String, Object>();
+ json.put("path", Path.getPathWithoutSchemeAndAuthority(status.getPath())
+ .toString());
+ json.put("replication", status.getReplication());
+ json.put("isDirectory", status.isDirectory());
+ json.put("len", status.getLen());
+ json.put("owner", status.getOwner());
+ json.put("group", status.getGroup());
+ json.put("permission", permissionToString(status.getPermission()));
+ json.put("accessTime", status.getAccessTime());
+ json.put("modificationTime", status.getModificationTime());
+ json.put("blockSize", status.getBlockSize());
+ json.put("replication", status.getReplication());
+ return json;
+ }
- /**
- * Converts a Hadoop <code>FileStatus</code> array into a JSON array object.
- * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
- * specified URL.
- * <p/>
- *
- * @param status
- * Hadoop file status array.
- * @return The JSON representation of the file status array.
- */
- @SuppressWarnings("unchecked")
- public static JSONArray fileStatusToJSON(FileStatus[] status) {
- JSONArray json = new JSONArray();
- if (status != null) {
- for (FileStatus s : status) {
- json.add(fileStatusToJSON(s));
- }
- }
- return json;
+ /**
+ * Converts a Hadoop <code>FileStatus</code> array into a JSON array object.
+ * It replaces the <code>SCHEME://HOST:PORT</code> of the path with the
+ * specified URL.
+ * <p/>
+ *
+ * @param status
+ * Hadoop file status array.
+ * @return The JSON representation of the file status array.
+ */
+ @SuppressWarnings("unchecked")
+ public static JSONArray fileStatusToJSON(FileStatus[] status) {
+ JSONArray json = new JSONArray();
+ if (status != null) {
+ for (FileStatus s : status) {
+ json.add(fileStatusToJSON(s));
+ }
}
+ return json;
+ }
}
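Every method in HdfsApi follows one pattern: wrap the filesystem call in a PrivilegedExceptionAction and run it under a proxy UserGroupInformation, so HDFS sees the view user rather than the service's login user. A standalone sketch of that pattern; the NameNode URI and username are assumptions (the view takes them from "dataworker.defaultFs" and ViewContext.getUsername()):

    import java.net.URI;
    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.UserGroupInformation;

    public class ProxyUserDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumed NameNode URI and user; HdfsApi gets both from the view context.
        final FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020"), conf, "ambari-qa");
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(
            "ambari-qa", UserGroupInformation.getLoginUser());
        // Same shape as HdfsApi.listdir: the HDFS call runs as the proxied user.
        FileStatus[] statuses = ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
          public FileStatus[] run() throws Exception {
            return fs.listStatus(new Path("/"));
          }
        });
        System.out.println(statuses.length + " entries under /");
      }
    }

Note the proxy user only works if the cluster's core-site.xml lets the login user impersonate others (the hadoop.proxyuser.*.hosts/groups settings).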
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba392ad7/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
index fc71ad8..6765de0 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
@@ -26,33 +26,49 @@ import org.apache.ambari.view.ViewContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+/**
+ * Base Hdfs service
+ */
public abstract class HdfsService {
- protected static final Logger logger = LoggerFactory.getLogger(HdfsService.class);
+ protected static final Logger logger = LoggerFactory.getLogger(HdfsService.class);
- protected final ViewContext context;
+ protected final ViewContext context;
- public HdfsService(ViewContext context) {
- this.context = context;
- }
+ /**
+ * Constructor
+ * @param context View Context instance
+ */
+ public HdfsService(ViewContext context) {
+ this.context = context;
+ }
- @XmlRootElement
- public static class BoolResult{
- public boolean success;
- public BoolResult(boolean success){
- this.success = success;
- }
+ /**
+ * Wrapper for json mapping of bool response
+ */
+ @XmlRootElement
+ public static class BoolResult{
+ public boolean success;
+ public BoolResult(boolean success){
+ this.success = success;
}
+ }
- private HdfsApi _api = null;
+ private HdfsApi _api = null;
- public HdfsApi getApi(ViewContext context) throws IOException, Exception {
- if (_api == null) {
- Thread.currentThread().setContextClassLoader(null);
- _api = new HdfsApi(context.getProperties().get("dataworker.defaultFs")
- .toString(), context.getUsername());
- }
- return _api;
+ /**
+ * Get HdfsApi instance
+ * @param context View Context instance
+ * @return HdfsApi business delegate
+ * @throws IOException
+ * @throws Exception
+ */
+ public HdfsApi getApi(ViewContext context) throws IOException, Exception {
+ if (_api == null) {
+ Thread.currentThread().setContextClassLoader(null);
+ _api = new HdfsApi(context.getProperties().get("dataworker.defaultFs"), context.getUsername());
}
+ return _api;
+ }
}
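getApi caches the HdfsApi lazily with no synchronization, which is fine while each request gets its own service instance. If an instance could ever be shared across threads, a guarded variant of the same idea would look like this (a sketch, not what this commit does):

    import org.apache.ambari.view.ViewContext;

    public abstract class ThreadSafeHdfsService {
      private HdfsApi _api = null;

      public synchronized HdfsApi getApi(ViewContext context) throws Exception {
        if (_api == null) {
          // Same initialization as above, guarded against concurrent first calls.
          Thread.currentThread().setContextClassLoader(null);
          _api = new HdfsApi(context.getProperties().get("dataworker.defaultFs"),
              context.getUsername());
        }
        return _api;
      }
    }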
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba392ad7/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
index 508e4c1..145ca3e 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java
@@ -33,69 +33,103 @@ import javax.ws.rs.core.UriInfo;
import org.apache.ambari.view.ViewContext;
+/**
+ * Help service
+ */
public class HelpService extends HdfsService {
- public HelpService(ViewContext context) {
- super(context);
- }
+ /**
+ * Constructor
+ * @param context View Context instance
+ */
+ public HelpService(ViewContext context) {
+ super(context);
+ }
- @GET
- @Path("/version")
- @Produces(MediaType.TEXT_PLAIN)
- public Response version(@Context HttpHeaders headers, @Context UriInfo ui) {
- return Response.ok("0.0.1-SNAPSHOT").build();
- }
+ /**
+ * Version
+ * @return version
+ */
+ @GET
+ @Path("/version")
+ @Produces(MediaType.TEXT_PLAIN)
+ public Response version() {
+ return Response.ok("0.0.1-SNAPSHOT").build();
+ }
- @GET
- @Path("/description")
- @Produces(MediaType.TEXT_PLAIN)
- public Response description(@Context HttpHeaders headers, @Context UriInfo ui) {
- return Response.ok("Application to work with HDFS").build();
- }
+ /**
+ * Description
+ * @return description
+ */
+ @GET
+ @Path("/description")
+ @Produces(MediaType.TEXT_PLAIN)
+ public Response description() {
+ return Response.ok("Application to work with HDFS").build();
+ }
- @GET
- @Path("/filesystem")
- @Produces(MediaType.TEXT_PLAIN)
- public Response filesystem(@Context HttpHeaders headers, @Context UriInfo ui) {
- return Response.ok(
- context.getProperties().get("dataworker.defaultFs").toString()).build();
- }
+ /**
+ * Filesystem configuration
+ * @return filesystem configuration
+ */
+ @GET
+ @Path("/filesystem")
+ @Produces(MediaType.TEXT_PLAIN)
+ public Response filesystem() {
+ return Response.ok(
+ context.getProperties().get("dataworker.defaultFs")).build();
+ }
- @GET
- @Path("/home")
- @Produces(MediaType.APPLICATION_JSON)
- public Response homeDir(@Context HttpHeaders headers, @Context UriInfo ui)
- throws FileNotFoundException, IOException, InterruptedException,
- Exception {
- HdfsApi api = getApi(context);
- return Response
- .ok(HdfsApi.fileStatusToJSON(api.getFileStatus(api.getHomeDir()
- .toString()))).build();
- }
+ /**
+ * Returns home directory
+ * @return home directory
+ * @throws Exception
+ */
+ @GET
+ @Path("/home")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response homeDir() throws Exception {
+ HdfsApi api = getApi(context);
+ return Response
+ .ok(HdfsApi.fileStatusToJSON(api.getFileStatus(api.getHomeDir()
+ .toString()))).build();
+ }
- @GET
- @Path("/trash/enabled")
- @Produces(MediaType.APPLICATION_JSON)
- public Response trashEnabled(@Context HttpHeaders headers, @Context UriInfo ui)
- throws Exception {
- HdfsApi api = getApi(context);
- return Response.ok(new BoolResult(api.trashEnabled())).build();
- }
+ /**
+ * Is trash enabled
+ * @return is trash enabled
+ * @throws Exception
+ */
+ @GET
+ @Path("/trash/enabled")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response trashEnabled()
+ throws Exception {
+ HdfsApi api = getApi(context);
+ return Response.ok(new BoolResult(api.trashEnabled())).build();
+ }
- @GET
- @Path("/trashDir")
- @Produces(MediaType.APPLICATION_JSON)
- public Response trashdir(@Context HttpHeaders headers, @Context UriInfo ui)
- throws IOException, Exception {
- HdfsApi api = getApi(context);
- try {
- return Response.ok(
- HdfsApi.fileStatusToJSON(api.getFileStatus(api.getTrashDir()
- .toString()))).build();
- } catch (FileNotFoundException ex) {
- return Response.ok(new BoolResult(false)).status(Status.NOT_FOUND)
- .build();
- }
+ /**
+ * Trash dir
+ * @return trash dir
+ * @throws Exception
+ */
+ @GET
+ @Path("/trashDir")
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response trashdir()
+ throws Exception {
+ HdfsApi api = getApi(context);
+ try {
+ return Response.ok(
+ HdfsApi.fileStatusToJSON(api.getFileStatus(api.getTrashDir()
+ .toString()))).build();
+ } catch (FileNotFoundException ex) {
+ return Response.ok(new BoolResult(false)).status(Status.NOT_FOUND)
+ .build();
}
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba392ad7/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
index fdcd0f2..27c23e1 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/UploadService.java
@@ -36,61 +36,85 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import com.sun.jersey.core.header.FormDataContentDisposition;
import com.sun.jersey.multipart.FormDataParam;
+/**
+ * Upload service
+ */
public class UploadService extends HdfsService {
- public UploadService(ViewContext context) {
- super(context);
- }
+ /**
+ * Constructor
+ * @param context View Context instance
+ */
+ public UploadService(ViewContext context) {
+ super(context);
+ }
- private void uploadFile(final String filePath, InputStream uploadedInputStream)
- throws IOException, Exception {
- byte[] chunk = new byte[1024];
- FSDataOutputStream out = getApi(context).create(filePath, false);
- while (uploadedInputStream.read(chunk) != -1) {
- out.write(chunk);
- }
- out.close();
+ private void uploadFile(final String filePath, InputStream uploadedInputStream)
+ throws IOException, Exception {
+ byte[] chunk = new byte[1024];
+ FSDataOutputStream out = getApi(context).create(filePath, false);
+ while (uploadedInputStream.read(chunk) != -1) {
+ out.write(chunk);
}
+ out.close();
+ }
- @PUT
- @Consumes(MediaType.MULTIPART_FORM_DATA)
- @Produces(MediaType.APPLICATION_JSON)
- public Response uploadFile(
- @FormDataParam("file") InputStream uploadedInputStream,
- @FormDataParam("file") FormDataContentDisposition contentDisposition,
- @FormDataParam("path") String path) throws IOException, Exception {
- if (!path.endsWith("/"))
- path = path + "/";
- String filePath = path + contentDisposition.getFileName();
- uploadFile(filePath, uploadedInputStream);
- return Response.ok(
- HdfsApi.fileStatusToJSON(getApi(context).getFileStatus(filePath)))
- .build();
- }
+ /**
+ * Upload file
+ * @param uploadedInputStream file input stream
+ * @param contentDisposition content disposition
+ * @param path path
+ * @return file status
+ * @throws Exception
+ */
+ @PUT
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response uploadFile(
+ @FormDataParam("file") InputStream uploadedInputStream,
+ @FormDataParam("file") FormDataContentDisposition contentDisposition,
+ @FormDataParam("path") String path) throws Exception {
+ if (!path.endsWith("/"))
+ path = path + "/";
+ String filePath = path + contentDisposition.getFileName();
+ uploadFile(filePath, uploadedInputStream);
+ return Response.ok(
+ HdfsApi.fileStatusToJSON(getApi(context).getFileStatus(filePath)))
+ .build();
+ }
- @PUT
- @Path("/zip")
- @Consumes(MediaType.MULTIPART_FORM_DATA)
- @Produces(MediaType.APPLICATION_JSON)
- public Response uploadZip(
- @FormDataParam("file") InputStream uploadedInputStream,
- @FormDataParam("file") FormDataContentDisposition contentDisposition,
- @FormDataParam("path") String path) throws IOException, Exception {
- if (!path.endsWith("/"))
- path = path + "/";
- ZipInputStream zip = new ZipInputStream(uploadedInputStream);
- ZipEntry ze = zip.getNextEntry();
- HdfsApi api = getApi(context);
- while (ze != null) {
- String filePath = path + ze.getName();
- if (ze.isDirectory()) {
- api.mkdir(filePath);
- } else {
- uploadFile(filePath, zip);
- }
- ze = zip.getNextEntry();
- }
- return Response.ok(HdfsApi.fileStatusToJSON(api.listdir(path))).build();
+ /**
+ * Upload zip and unpack
+ * @param uploadedInputStream file input stream
+ * @param contentDisposition content disposition
+ * @param path path
+ * @return files statuses
+ * @throws IOException
+ * @throws Exception
+ */
+ @PUT
+ @Path("/zip")
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response uploadZip(
+ @FormDataParam("file") InputStream uploadedInputStream,
+ @FormDataParam("file") FormDataContentDisposition contentDisposition,
+ @FormDataParam("path") String path) throws Exception {
+ if (!path.endsWith("/"))
+ path = path + "/";
+ ZipInputStream zip = new ZipInputStream(uploadedInputStream);
+ ZipEntry ze = zip.getNextEntry();
+ HdfsApi api = getApi(context);
+ while (ze != null) {
+ String filePath = path + ze.getName();
+ if (ze.isDirectory()) {
+ api.mkdir(filePath);
+ } else {
+ uploadFile(filePath, zip);
+ }
+ ze = zip.getNextEntry();
}
+ return Response.ok(HdfsApi.fileStatusToJSON(api.listdir(path))).build();
+ }
}
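On the wire, uploadFile expects multipart/form-data with a "file" part and a "path" form field. A Jersey 1.x client sketch of a matching request (base URL illustrative, mirroring the test's BASE_URI; needs the jersey-multipart dependency on the client):

    import java.io.File;
    import javax.ws.rs.core.MediaType;
    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.ClientResponse;
    import com.sun.jersey.multipart.FormDataMultiPart;
    import com.sun.jersey.multipart.file.FileDataBodyPart;

    public class UploadClient {
      public static void main(String[] args) {
        Client client = Client.create();
        FormDataMultiPart form = new FormDataMultiPart();
        form.field("path", "/tmp/");                                        // HDFS target dir
        form.bodyPart(new FileDataBodyPart("file", new File("local.txt"))); // uploaded content
        ClientResponse response = client
            .resource("http://localhost:8084/myapp/upload")                 // hypothetical root
            .type(MediaType.MULTIPART_FORM_DATA_TYPE)
            .put(ClientResponse.class, form);
        System.out.println(response.getStatus()); // 200 with the new file's status as JSON
      }
    }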
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba392ad7/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java b/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
index e74d5b2..ba5f316 100644
--- a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
+++ b/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
@@ -25,20 +25,15 @@ import static org.easymock.EasyMock.replay;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
-import java.io.FileOutputStream;
import java.io.FileWriter;
-import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
-import java.util.zip.ZipInputStream;
-import java.util.zip.ZipOutputStream;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
-import javax.ws.rs.client.WebTarget;
import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.ViewResourceHandler;
@@ -61,125 +56,117 @@ import com.sun.jersey.multipart.FormDataBodyPart;
public class FilebrowserTest{
- private ViewResourceHandler handler;
- private ViewContext context;
- private HttpHeaders httpHeaders;
- private UriInfo uriInfo;
-
- private Map<String, String> properties;
- private FileBrowserService fileBrowserService;
-
- private MiniDFSCluster hdfsCluster;
- public static final String BASE_URI = "http://localhost:8084/myapp/";
-
-
- @Before
- public void setUp() throws Exception {
- handler = createNiceMock(ViewResourceHandler.class);
- context = createNiceMock(ViewContext.class);
- httpHeaders = createNiceMock(HttpHeaders.class);
- uriInfo = createNiceMock(UriInfo.class);
-
- properties = new HashMap<String, String>();
- File baseDir = new File("./target/hdfs/" + "FilebrowserTest")
- .getAbsoluteFile();
- FileUtil.fullyDelete(baseDir);
- Configuration conf = new Configuration();
- conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
- MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
- hdfsCluster = builder.build();
- String hdfsURI = hdfsCluster.getURI() + "/";
- properties.put("dataworker.defaultFs", hdfsURI);
- expect(context.getProperties()).andReturn(properties).anyTimes();
- expect(context.getUsername()).andReturn("ambari-qa").anyTimes();
- replay(handler, context, httpHeaders, uriInfo);
- fileBrowserService = getService(FileBrowserService.class, handler, context);
- FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
- request.path = "/tmp";
- fileBrowserService.fileOps().mkdir(request, httpHeaders, uriInfo);
- }
-
- @After
- public void tearDown() {
- hdfsCluster.shutdown();
- }
-
- // TODO : fix test!!!
- @Ignore
- @Test
- public void testListDir() throws IOException, Exception {
- FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
- request.path = "/tmp1";
- fileBrowserService.fileOps().mkdir(request, httpHeaders, uriInfo);
- Response response = fileBrowserService.fileOps().listdir("/", httpHeaders,
- uriInfo);
- JSONArray statuses = (JSONArray) response.getEntity();
- System.out.println(response.getEntity());
- Assert.assertEquals(200, response.getStatus());
- Assert.assertTrue(statuses.size() > 0);
- System.out.println(statuses);
- }
-
- private Response uploadFile(String path, String fileName,
- String fileExtension, String fileContent) throws Exception {
- File tempFile = File.createTempFile(fileName, fileExtension);
- BufferedWriter bw = new BufferedWriter(new FileWriter(tempFile));
- bw.write(fileContent);
- bw.close();
- InputStream content = new FileInputStream(tempFile);
- FormDataBodyPart inputStreamBody = new FormDataBodyPart(
- FormDataContentDisposition.name("file")
- .fileName(fileName + fileExtension).build(), content,
- MediaType.APPLICATION_OCTET_STREAM_TYPE);
-
- Response response = fileBrowserService.upload().uploadFile(content,
- inputStreamBody.getFormDataContentDisposition(), "/tmp/");
- return response;
- }
-
- // TODO : fix test!!!
- @Ignore
- @Test
- public void testUploadFile() throws Exception {
- Response response = uploadFile("/tmp/", "testUpload", ".tmp", "Hello world");
- Assert.assertEquals(200, response.getStatus());
- Response listdir = fileBrowserService.fileOps().listdir("/tmp", httpHeaders,
- uriInfo);
- JSONArray statuses = (JSONArray) listdir.getEntity();
- System.out.println(statuses.size());
- Response response2 = fileBrowserService.download().browse("/tmp/testUpload.tmp", false, httpHeaders, uriInfo);
- Assert.assertEquals(200, response2.getStatus());
- }
-
- // TODO : fix test!!!
- @Ignore
- @Test
- public void testStreamingGzip() throws Exception {
- String gzipDir = "/tmp/testGzip";
- FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
- request.path = gzipDir;
- fileBrowserService.fileOps().mkdir(request, httpHeaders, uriInfo);
- for (int i = 0; i < 10; i++) {
- uploadFile(gzipDir, "testGzip" + i, ".txt", "Hello world" + i);
- }
- DownloadService.DownloadRequest dr = new DownloadService.DownloadRequest();
- dr.entries = new String[] { gzipDir };
-
- Response result = fileBrowserService.download().downloadGZip(dr);
- }
-
- private static <T> T getService(Class<T> clazz,
- final ViewResourceHandler viewResourceHandler,
- final ViewContext viewInstanceContext) {
- Injector viewInstanceInjector = Guice.createInjector(new AbstractModule() {
- @Override
- protected void configure() {
- bind(ViewResourceHandler.class).toInstance(viewResourceHandler);
- bind(ViewContext.class).toInstance(viewInstanceContext);
- }
- });
- return viewInstanceInjector.getInstance(clazz);
+ private ViewResourceHandler handler;
+ private ViewContext context;
+ private HttpHeaders httpHeaders;
+ private UriInfo uriInfo;
+
+ private Map<String, String> properties;
+ private FileBrowserService fileBrowserService;
+
+ private MiniDFSCluster hdfsCluster;
+ public static final String BASE_URI = "http://localhost:8084/myapp/";
+
+
+ @Before
+ public void setUp() throws Exception {
+ handler = createNiceMock(ViewResourceHandler.class);
+ context = createNiceMock(ViewContext.class);
+ httpHeaders = createNiceMock(HttpHeaders.class);
+ uriInfo = createNiceMock(UriInfo.class);
+
+ properties = new HashMap<String, String>();
+ File baseDir = new File("./target/hdfs/" + "FilebrowserTest")
+ .getAbsoluteFile();
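+ // Fresh in-process HDFS per run: wipe the base dir, boot a
+ // MiniDFSCluster there, and point the view's defaultFs at it.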
+ FileUtil.fullyDelete(baseDir);
+ Configuration conf = new Configuration();
+ conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
+ MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+ hdfsCluster = builder.build();
+ String hdfsURI = hdfsCluster.getURI() + "/";
+ properties.put("dataworker.defaultFs", hdfsURI);
+ expect(context.getProperties()).andReturn(properties).anyTimes();
+ expect(context.getUsername()).andReturn(System.getProperty("user.name")).anyTimes();
+ replay(handler, context, httpHeaders, uriInfo);
+ fileBrowserService = getService(FileBrowserService.class, handler, context);
+ FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
+ request.path = "/tmp";
+ fileBrowserService.fileOps().mkdir(request);
+ }
+
+ @After
+ public void tearDown() {
+ hdfsCluster.shutdown();
+ }
+
+ @Test
+ public void testListDir() throws Exception {
+ FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
+ request.path = "/tmp1";
+ fileBrowserService.fileOps().mkdir(request);
+ Response response = fileBrowserService.fileOps().listdir("/");
+ JSONArray statuses = (JSONArray) response.getEntity();
+ System.out.println(response.getEntity());
+ Assert.assertEquals(200, response.getStatus());
+ Assert.assertTrue(statuses.size() > 0);
+ System.out.println(statuses);
+ }
+
+ private Response uploadFile(String path, String fileName,
+ String fileExtension, String fileContent) throws Exception {
+ File tempFile = File.createTempFile(fileName, fileExtension);
+ BufferedWriter bw = new BufferedWriter(new FileWriter(tempFile));
+ bw.write(fileContent);
+ bw.close();
+ InputStream content = new FileInputStream(tempFile);
+ FormDataBodyPart inputStreamBody = new FormDataBodyPart(
+ FormDataContentDisposition.name("file")
+ .fileName(fileName + fileExtension).build(), content,
+ MediaType.APPLICATION_OCTET_STREAM_TYPE);
+
+ Response response = fileBrowserService.upload().uploadFile(content,
+ inputStreamBody.getFormDataContentDisposition(), "/tmp/");
+ return response;
+ }
+
+ @Test
+ public void testUploadFile() throws Exception {
+ Response response = uploadFile("/tmp/", "testUpload", ".tmp", "Hello world");
+ Assert.assertEquals(200, response.getStatus());
+ Response listdir = fileBrowserService.fileOps().listdir("/tmp");
+ JSONArray statuses = (JSONArray) listdir.getEntity();
+ System.out.println(statuses.size());
+ Response response2 = fileBrowserService.download().browse("/tmp/testUpload.tmp", false, httpHeaders, uriInfo);
+ Assert.assertEquals(200, response2.getStatus());
+ }
+
+ @Test
+ public void testStreamingGzip() throws Exception {
+ String gzipDir = "/tmp/testGzip";
+ FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
+ request.path = gzipDir;
+ fileBrowserService.fileOps().mkdir(request);
+ for (int i = 0; i < 10; i++) {
+ uploadFile(gzipDir, "testGzip" + i, ".txt", "Hello world" + i);
}
+ DownloadService.DownloadRequest dr = new DownloadService.DownloadRequest();
+ dr.entries = new String[] { gzipDir };
+
+ Response result = fileBrowserService.download().downloadGZip(dr);
+ // The aggregated stream should be produced successfully.
+ Assert.assertEquals(200, result.getStatus());
+ }
+
+ private static <T> T getService(Class<T> clazz,
+ final ViewResourceHandler viewResourceHandler,
+ final ViewContext viewInstanceContext) {
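+ // Build a throwaway Guice injector binding the mocked handler and
+ // context, then have it construct the requested service class.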
+ Injector viewInstanceInjector = Guice.createInjector(new AbstractModule() {
+ @Override
+ protected void configure() {
+ bind(ViewResourceHandler.class).toInstance(viewResourceHandler);
+ bind(ViewContext.class).toInstance(viewInstanceContext);
+ }
+ });
+ return viewInstanceInjector.getInstance(clazz);
+ }
}
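
A note on the pattern above: getService() is what lets each test build a
view service against mocked Ambari plumbing. A minimal sketch of reusing
it for another service class (whether UploadService is injectable this
way is an assumption, not verified against the patch):

    // Hypothetical: obtain a standalone service bound to the same mocks.
    UploadService uploadService = getService(UploadService.class, handler, context);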