You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by db...@apache.org on 2016/03/01 11:45:16 UTC

ambari git commit: AMBARI-15216. New Files view: Error on concatenating and downloading a large number of files. (dipayanb)

Repository: ambari
Updated Branches:
  refs/heads/trunk a93350745 -> 61be6b22e


AMBARI-15216. New Files view: Error on concatenating and downloading a large number of files. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/61be6b22
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/61be6b22
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/61be6b22

Branch: refs/heads/trunk
Commit: 61be6b22e5e06079d6a8b3a82ba1e4510a79953e
Parents: a933507
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Tue Mar 1 16:14:43 2016 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Tue Mar 1 16:14:43 2016 +0530

----------------------------------------------------------------------
 .../view/filebrowser/DownloadService.java       | 89 ++++++++++----------
 contrib/views/files/src/main/resources/view.xml |  8 ++
 2 files changed, 53 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/61be6b22/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
index 749174a..95a07b5 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java
@@ -47,10 +47,14 @@ import javax.ws.rs.core.UriInfo;
 import javax.xml.bind.annotation.XmlElement;
 
 import com.google.gson.Gson;
+import org.apache.ambari.view.filebrowser.utils.MisconfigurationFormattedException;
 import org.apache.ambari.view.filebrowser.utils.NotFoundFormattedException;
 import org.apache.ambari.view.filebrowser.utils.ServiceFormattedException;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.ambari.view.ViewContext;
 import org.apache.hadoop.security.AccessControlException;
@@ -285,12 +289,7 @@ public class DownloadService extends HdfsService {
   @Produces("application/zip")
   public Response zipByRequestId(@QueryParam("requestId") String requestId) {
     try {
-      String json = context.getInstanceData(requestId);
-      if (json == null) {
-        throw new NotFoundFormattedException("Request is old", null);
-      }
-      DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
-      context.removeInstanceData(requestId);
+      DownloadRequest request = getDownloadRequest(requestId);
       return downloadGZip(request);
     } catch (WebApplicationException ex) {
       throw ex;
@@ -310,16 +309,7 @@ public class DownloadService extends HdfsService {
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
   public Response zipGenerateLink(final DownloadRequest request) {
-    try {
-      String requestId = generateUniqueIdentifer(request);
-      JSONObject json = new JSONObject();
-      json.put("requestId", requestId);
-      return Response.ok(json).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
+    return generateLink(request);
   }
 
   /**
@@ -333,9 +323,7 @@ public class DownloadService extends HdfsService {
   @Produces(MediaType.APPLICATION_OCTET_STREAM)
   public Response concatByRequestId(@QueryParam("requestId") String requestId) {
     try {
-      String json = context.getInstanceData(requestId);
-      DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
-      context.removeInstanceData(requestId);
+      DownloadRequest request = getDownloadRequest(requestId);
       return concat(request);
     } catch (WebApplicationException ex) {
       throw ex;
@@ -355,6 +343,10 @@ public class DownloadService extends HdfsService {
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
   public Response concatGenerateLink(final DownloadRequest request) {
+    return generateLink(request);
+  }
+
+  private Response generateLink(DownloadRequest request) {
     try {
       String requestId = generateUniqueIdentifer(request);
       JSONObject json = new JSONObject();
@@ -367,39 +359,48 @@ public class DownloadService extends HdfsService {
     }
   }
 
+  private DownloadRequest getDownloadRequest(String requestId) throws HdfsApiException, IOException, InterruptedException {
+    String fileName = getFileNameForRequestData(requestId);
+    String json = HdfsUtil.readFile(getApi(context), fileName);
+    DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
+
+    deleteFileFromHdfs(fileName);
+    return request;
+  }
+
   private Gson gson = new Gson();
 
   private String generateUniqueIdentifer(DownloadRequest request) {
     String uuid = UUID.randomUUID().toString().replaceAll("-", "");
     String json = gson.toJson(request);
-    context.putInstanceData(uuid, json);
+    writeToHdfs(uuid, json);
     return uuid;
   }
 
-    /*
-     * Temporary use Stream Output
-     *
-     * @POST
-     *
-     * @Path("/concat")
-     *
-     * @Consumes(MediaType.APPLICATION_JSON)
-     *
-     * @Produces(MediaType.APPLICATION_OCTET_STREAM) public ChunkedOutput<byte[]>
-     * concat(final DownloadRequest request) { final ChunkedOutput<byte[]> output
-     * = new ChunkedOutput<byte[]>(byte[].class);
-     *
-     * new Thread() { public void run() { try { FSDataInputStream in = null; for
-     * (String path : request.entries) { try { in = getApi(context).open(path);
-     * byte[] chunk = new byte[1024]; while (in.read(chunk) != -1) {
-     * output.write(chunk); } } finally { if (in != null) in.close(); }
-     *
-     * } } catch (Exception ex) { logger.error("Error occured: " +
-     * ex.getMessage()); } finally { try { output.close(); } catch (IOException e)
-     * { e.printStackTrace(); } } } }.start();
-     *
-     * return output; }
-     */
+  private void writeToHdfs(String uuid, String json) {
+    String fileName = getFileNameForRequestData(uuid);
+    try {
+      HdfsUtil.putStringToFile(getApi(context), fileName, json);
+    } catch (HdfsApiException e) {
+      logger.error("Failed to write request data to HDFS", e);
+      throw new ServiceFormattedException("Failed to write request data to HDFS", e);
+    }
+  }
+
+  private String getFileNameForRequestData(String uuid) {
+    String tmpPath = context.getProperties().get("tmp.dir");
+    if (tmpPath == null) {
+      String msg = "tmp.dir is not configured!";
+      logger.error(msg);
+      throw new MisconfigurationFormattedException("tmp.dir");
+    }
+    return String.format(tmpPath + "/%s.json", uuid);
+  }
+
+  private void deleteFileFromHdfs(String fileName) throws IOException, InterruptedException {
+    getApi(context).delete(fileName, true);
+  }
+
 
   /**
    * Wrapper for json mapping of download request

http://git-wip-us.apache.org/repos/asf/ambari/blob/61be6b22/contrib/views/files/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/view.xml b/contrib/views/files/src/main/resources/view.xml
index adaec10..eb80292 100644
--- a/contrib/views/files/src/main/resources/view.xml
+++ b/contrib/views/files/src/main/resources/view.xml
@@ -117,6 +117,14 @@
         <label>WebHDFS Authorization</label>
         <required>false</required>
     </parameter>
+    <parameter>
+        <name>tmp.dir</name>
+        <description>HDFS directory path to store temporary files required for the view operations.</description>
+        <label>Temporary HDFS Directory</label>
+        <placeholder>/user/${username}/files-view/tmp</placeholder>
+        <default-value>/user/${username}/files-view/tmp</default-value>
+        <required>true</required>
+    </parameter>
 
     <resource>
         <name>files</name>