You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by mr...@apache.org on 2017/09/11 04:39:48 UTC

[78/94] [abbrv] ambari git commit: AMBARI-21890.Ambari Files View - browser going to hung state while opening a HDFS folder which has huge number of files(>10000)(Venkata Sairam)

AMBARI-21890. Ambari Files View - browser going to a hung state while opening an HDFS folder which has a huge number of files (>10000) (Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f6ecbd1d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f6ecbd1d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f6ecbd1d

Branch: refs/heads/branch-feature-AMBARI-14714
Commit: f6ecbd1d7fa5f90ec020e64dbb403b3c3c2bdeb3
Parents: 5b1a63b
Author: Venkata Sairam <ve...@gmail.com>
Authored: Fri Sep 8 08:38:54 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Fri Sep 8 08:38:54 2017 +0530

----------------------------------------------------------------------
 .../view/commons/hdfs/FileOperationService.java |  41 +++-
 .../resources/ui/app/components/file-search.js  |  10 +-
 .../main/resources/ui/app/controllers/files.js  |  20 +-
 .../src/main/resources/ui/app/routes/files.js   |  16 +-
 .../ui/app/templates/components/file-row.hbs    |   2 +-
 .../ui/app/templates/components/file-search.hbs |   2 +-
 .../main/resources/ui/app/templates/files.hbs   |   8 +-
 .../view/filebrowser/FilebrowserTest.java       |   4 +-
 .../ambari/view/utils/hdfs/DirListInfo.java     |  97 +++++++++
 .../ambari/view/utils/hdfs/DirStatus.java       |  75 +++++++
 .../apache/ambari/view/utils/hdfs/HdfsApi.java  | 124 ++++++++++--
 .../ambari/view/utils/hdfs/HdfsApiTest.java     | 201 +++++++++++++++++++
 12 files changed, 557 insertions(+), 43 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
----------------------------------------------------------------------
diff --git a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
index d6e484d..6fa1056 100644
--- a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
+++ b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
@@ -18,12 +18,17 @@
 
 package org.apache.ambari.view.commons.hdfs;
 
+import com.google.common.base.Strings;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.commons.exceptions.NotFoundFormattedException;
 import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.DirListInfo;
+import org.apache.ambari.view.utils.hdfs.DirStatus;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.*;
 import javax.ws.rs.core.*;
@@ -41,6 +46,14 @@ import java.util.Map;
  * File operations service
  */
 public class FileOperationService extends HdfsService {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(FileOperationService.class);
+
+
+  private static final String FILES_VIEW_MAX_FILE_PER_PAGE = "views.files.max.files.per.page";
+  private static final int DEFAULT_FILES_VIEW_MAX_FILE_PER_PAGE = 5000;
+
+  private Integer maxFilesPerPage = DEFAULT_FILES_VIEW_MAX_FILE_PER_PAGE;
 
   /**
    * Constructor
@@ -48,6 +61,19 @@ public class FileOperationService extends HdfsService {
    */
   public FileOperationService(ViewContext context) {
     super(context);
+    setMaxFilesPerPage(context);
+  }
+
+  private void setMaxFilesPerPage(ViewContext context) {
+    String maxFilesPerPageProperty = context.getAmbariProperty(FILES_VIEW_MAX_FILE_PER_PAGE);
+    LOG.info("maxFilesPerPageProperty = {}", maxFilesPerPageProperty);
+    if(!Strings.isNullOrEmpty(maxFilesPerPageProperty)){
+      try {
+        maxFilesPerPage = Integer.parseInt(maxFilesPerPageProperty);
+      }catch(Exception e){
+        LOG.error("{} should be integer, but it is {}, using default value of {}", FILES_VIEW_MAX_FILE_PER_PAGE , maxFilesPerPageProperty, DEFAULT_FILES_VIEW_MAX_FILE_PER_PAGE);
+      }
+    }
   }
 
   /**
@@ -56,21 +82,30 @@ public class FileOperationService extends HdfsService {
    */
   public FileOperationService(ViewContext context, Map<String, String> customProperties) {
     super(context, customProperties);
+    this.setMaxFilesPerPage(context);
   }
 
   /**
    * List dir
    * @param path path
+   * @param nameFilter : name on which filter is applied
    * @return response with dir content
    */
   @GET
   @Path("/listdir")
   @Produces(MediaType.APPLICATION_JSON)
-  public Response listdir(@QueryParam("path") String path) {
+  public Response listdir(@QueryParam("path") String path, @QueryParam("nameFilter") String nameFilter) {
     try {
       JSONObject response = new JSONObject();
-      response.put("files", getApi().fileStatusToJSON(getApi().listdir(path)));
-      response.put("meta", getApi().fileStatusToJSON(getApi().getFileStatus(path)));
+      Map<String, Object> parentInfo = getApi().fileStatusToJSON(getApi().getFileStatus(path));
+      DirStatus dirStatus = getApi().listdir(path, nameFilter, maxFilesPerPage);
+      DirListInfo dirListInfo = dirStatus.getDirListInfo();
+      parentInfo.put("originalSize", dirListInfo.getOriginalSize());
+      parentInfo.put("truncated", dirListInfo.isTruncated());
+      parentInfo.put("finalSize", dirListInfo.getFinalSize());
+      parentInfo.put("nameFilter", dirListInfo.getNameFilter());
+      response.put("files", getApi().fileStatusToJSON(dirStatus.getFileStatuses()));
+      response.put("meta", parentInfo);
       return Response.ok(response).build();
     } catch (WebApplicationException ex) {
       throw ex;

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/components/file-search.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/components/file-search.js b/contrib/views/files/src/main/resources/ui/app/components/file-search.js
index b65749c..68ec280 100644
--- a/contrib/views/files/src/main/resources/ui/app/components/file-search.js
+++ b/contrib/views/files/src/main/resources/ui/app/components/file-search.js
@@ -23,11 +23,6 @@ export default Ember.Component.extend({
   classNameBindings: ['expanded::col-md-9', 'expanded::col-md-offset-3'],
   expanded: false,
 
-  searchText: '',
-
-  throttleTyping: Ember.observer('searchText', function() {
-    Ember.run.debounce(this, this.searchFiles, 500);
-  }),
 
   searchFiles: function() {
     this.sendAction('searchAction', this.get('searchText'));
@@ -38,5 +33,10 @@ export default Ember.Component.extend({
   },
   focusOut: function() {
     this.set('expanded', false);
+  },
+  actions : {
+      throttleTyping: function() {
+        Ember.run.debounce(this, this.searchFiles, 1000);
+      }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/controllers/files.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/controllers/files.js b/contrib/views/files/src/main/resources/ui/app/controllers/files.js
index 8b5bb7b..30d9896 100644
--- a/contrib/views/files/src/main/resources/ui/app/controllers/files.js
+++ b/contrib/views/files/src/main/resources/ui/app/controllers/files.js
@@ -28,9 +28,9 @@ export default Ember.Controller.extend({
   isSelected: Ember.computed('selectedFilesCount', 'selectedFolderCount', function() {
     return (this.get('selectedFilesCount') + this.get('selectedFolderCount')) !== 0;
   }),
-
-  queryParams: ['path'],
+  queryParams: ['path', 'filter'],
   path: '/',
+  filter: '',
   columns: columnConfig,
 
   currentMessagesCount: Ember.computed.alias('logger.currentMessagesCount'),
@@ -71,16 +71,10 @@ export default Ember.Controller.extend({
     return parentPath;
   }),
 
-  sortedContent: Ember.computed.sort('model', 'sortProperty'),
+  arrangedContent:  Ember.computed.sort('model', 'sortProperty'),
 
-  arrangedContent: Ember.computed('model', 'sortProperty', 'validSearchText', function() {
-    var searchText = this.get('validSearchText');
-    if(!Ember.isBlank(searchText)) {
-      return this.get('sortedContent').filter(function(entry) {
-        return !!entry.get('name').match(searchText);
-      });
-    }
-    return this.get('sortedContent');
+  metaInfo: Ember.computed('model', function() {
+    return this.get('model.meta');
   }),
 
   selectedFilePathsText: function () {
@@ -144,7 +138,7 @@ export default Ember.Controller.extend({
     selectAll: function(selectStatus) {
       this.get('fileSelectionService').deselectAll();
       if(selectStatus === false) {
-        this.get('fileSelectionService').selectFiles(this.get('sortedContent'));
+        this.get('fileSelectionService').selectFiles(this.get('arrangedContent'));
       }
     },
 
@@ -155,7 +149,7 @@ export default Ember.Controller.extend({
 
     //Context Menu actions
     openFolder: function(path) {
-      this.transitionToRoute({queryParams: {path: path}});
+      this.transitionToRoute({queryParams: {path: path, filter:''}});
     }
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/routes/files.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/routes/files.js b/contrib/views/files/src/main/resources/ui/app/routes/files.js
index 140732f..be7a515 100644
--- a/contrib/views/files/src/main/resources/ui/app/routes/files.js
+++ b/contrib/views/files/src/main/resources/ui/app/routes/files.js
@@ -26,13 +26,15 @@ export default Ember.Route.extend(FileOperationMixin, {
   queryParams: {
     path: {
       refreshModel: true
+    },
+    filter: {
+      refreshModel: true
     }
   },
   model: function(params) {
     this.store.unloadAll('file');
-    return this.store.query('file', {path: params.path});
+    return this.store.query('file', {path: params.path, nameFilter:params.filter});
   },
-
   setupController: function(controller, model) {
     this._super(controller, model);
     controller.set('searchText', '');
@@ -44,7 +46,17 @@ export default Ember.Route.extend(FileOperationMixin, {
     refreshCurrentRoute: function() {
       this.refresh();
     },
+    searchAction : function(searchText) {
+     this.set('controller.filter', searchText);
 
+     this.transitionTo({
+       queryParams: {
+         path: this.get('currentPath'),
+         filter: searchText
+       }
+     });
+
+    },
     error: function(error, transition) {
       this.get('fileSelectionService').reset();
       let path = transition.queryParams.path;

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs b/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs
index 72ed840..5198504 100644
--- a/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs
+++ b/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs
@@ -19,7 +19,7 @@
 <div class="row">
   <div class={{get-value-from-columns columnHeaders 'name' 'columnClass'}}>
     {{#if file.isDirectory}}
-      {{#link-to 'files' (query-params path=file.path) bubbles=false title=file.name}}{{fa-icon "folder-o"}} {{shorten-text file.name 40}} {{/link-to}}
+      {{#link-to 'files' (query-params path=file.path filter='') bubbles=false title=file.name}}{{fa-icon "folder-o"}} {{shorten-text file.name 40}} {{/link-to}}
     {{else}}
       <span title={{ file.name }}>{{fa-icon "file-o"}} {{shorten-text file.name 40}}</span>
     {{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs b/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs
index 298d672..f3dc8f9 100644
--- a/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs
+++ b/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs
@@ -16,5 +16,5 @@
 * limitations under the License.
 }}
 
-{{input type="text" placeholder="Search in current directory..." class="form-control input-sm" value=searchText}}
+{{input type="text" placeholder="Search in current directory..." class="form-control input-sm" action='throttleTyping' on="key-down" value=searchText}}
 <span class="input-group-addon">{{fa-icon icon='search'}}</span>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/templates/files.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/templates/files.hbs b/contrib/views/files/src/main/resources/ui/app/templates/files.hbs
index 63e0dd8..5714cf3 100644
--- a/contrib/views/files/src/main/resources/ui/app/templates/files.hbs
+++ b/contrib/views/files/src/main/resources/ui/app/templates/files.hbs
@@ -29,7 +29,11 @@
       {{else}}
           <span class="context-text" style="    z-index: 1;
       position: relative;">
-        Total: <strong>{{arrangedContent.length}}</strong> files or folders
+        {{#if metaInfo.truncated}}
+            Showing <strong>{{arrangedContent.length}}</strong> files or folders of <strong>{{metaInfo.originalSize}}</strong>
+        {{else}}
+            Total: <strong>{{arrangedContent.length}}</strong> files or folders
+        {{/if}}
       </span>
       {{/if}}
     </div>
@@ -82,7 +86,7 @@
         </div>
         <div class="col-md-4 col-xs-4">
             <div class="row">
-              {{file-search searchText=searchText searchAction="searchFiles"}}
+              {{file-search searchText=filter searchAction="searchAction"}}
             </div>
         </div>
     </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java b/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
index f431f66..6ddc8f6 100644
--- a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
+++ b/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
@@ -110,7 +110,7 @@ public class FilebrowserTest{
     FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
     request.path = "/tmp1";
     fileBrowserService.fileOps().mkdir(request);
-    Response response = fileBrowserService.fileOps().listdir("/");
+    Response response = fileBrowserService.fileOps().listdir("/", null);
     JSONObject responseObject = (JSONObject) response.getEntity();
     JSONArray statuses = (JSONArray) responseObject.get("files");
     System.out.println(response.getEntity());
@@ -140,7 +140,7 @@ public class FilebrowserTest{
   public void testUploadFile() throws Exception {
     Response response = uploadFile("/tmp/", "testUpload", ".tmp", "Hello world");
     Assert.assertEquals(200, response.getStatus());
-    Response listdir = fileBrowserService.fileOps().listdir("/tmp");
+    Response listdir = fileBrowserService.fileOps().listdir("/tmp", null);
     JSONObject responseObject = (JSONObject) listdir.getEntity();
     JSONArray statuses = (JSONArray) responseObject.get("files");
     System.out.println(statuses.size());

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirListInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirListInfo.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirListInfo.java
new file mode 100644
index 0000000..6bd13bb
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirListInfo.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+public class DirListInfo {
+  private int originalSize;
+  private boolean truncated;
+  private int finalSize;
+  private String nameFilter;
+
+  public DirListInfo(int originalSize, boolean truncated, int finalSize, String nameFilter) {
+    this.originalSize = originalSize;
+    this.truncated = truncated;
+    this.finalSize = finalSize;
+    this.nameFilter = nameFilter;
+  }
+
+  public int getOriginalSize() {
+    return originalSize;
+  }
+
+  public void setOriginalSize(int originalSize) {
+    this.originalSize = originalSize;
+  }
+
+  public boolean isTruncated() {
+    return truncated;
+  }
+
+  public void setTruncated(boolean truncated) {
+    this.truncated = truncated;
+  }
+
+  public int getFinalSize() {
+    return finalSize;
+  }
+
+  public void setFinalSize(int finalSize) {
+    this.finalSize = finalSize;
+  }
+
+  public String getNameFilter() {
+    return nameFilter;
+  }
+
+  public void setNameFilter(String nameFilter) {
+    this.nameFilter = nameFilter;
+  }
+
+  @Override
+  public String toString() {
+    return "DirListInfo{" +
+        "originalSize=" + originalSize +
+        ", truncated=" + truncated +
+        ", finalSize=" + finalSize +
+        ", nameFilter='" + nameFilter + '\'' +
+        '}';
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    DirListInfo that = (DirListInfo) o;
+
+    if (originalSize != that.originalSize) return false;
+    if (truncated != that.truncated) return false;
+    if (finalSize != that.finalSize) return false;
+    return nameFilter != null ? nameFilter.equals(that.nameFilter) : that.nameFilter == null;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = originalSize;
+    result = 31 * result + (truncated ? 1 : 0);
+    result = 31 * result + finalSize;
+    result = 31 * result + (nameFilter != null ? nameFilter.hashCode() : 0);
+    return result;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirStatus.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirStatus.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirStatus.java
new file mode 100644
index 0000000..f922b00
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirStatus.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.utils.hdfs;
+
+import org.apache.hadoop.fs.FileStatus;
+
+import java.util.Arrays;
+
/**
 * Result of a directory listing: the file statuses that survived filtering
 * and truncation, together with {@link DirListInfo} metadata (original size,
 * final size, truncation flag, applied name filter).
 */
public class DirStatus {
  private DirListInfo dirListInfo;    // size/truncation metadata for the listing
  private FileStatus [] fileStatuses; // entries returned to the caller; may be null

  public DirStatus(FileStatus[] fileStatuses, DirListInfo dirListInfo) {
    this.fileStatuses = fileStatuses;
    this.dirListInfo = dirListInfo;
  }

  public DirListInfo getDirListInfo() {
    return dirListInfo;
  }

  public void setDirListInfo(DirListInfo dirListInfo) {
    this.dirListInfo = dirListInfo;
  }

  public FileStatus[] getFileStatuses() {
    // NOTE(review): returns the internal array directly (no defensive copy);
    // callers can mutate it — confirm this sharing is intended.
    return fileStatuses;
  }

  public void setFileStatuses(FileStatus[] fileStatuses) {
    this.fileStatuses = fileStatuses;
  }

  @Override
  public String toString() {
    return "DirStatus{" +
        "dirListInfo=" + dirListInfo +
        ", fileStatuses=" + Arrays.toString(fileStatuses) +
        '}';
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    DirStatus dirStatus = (DirStatus) o;

    if (dirListInfo != null ? !dirListInfo.equals(dirStatus.dirListInfo) : dirStatus.dirListInfo != null) return false;
    // Element-wise array comparison; NOTE(review): this relies on
    // FileStatus.equals for each entry — confirm that equality is intended.
    return Arrays.equals(fileStatuses, dirStatus.fileStatuses);
  }

  @Override
  public int hashCode() {
    int result = dirListInfo != null ? dirListInfo.hashCode() : 0;
    result = 31 * result + Arrays.hashCode(fileStatuses);
    return result;
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 90fa483..8b987be 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.view.utils.hdfs;
 
+import com.google.common.base.Strings;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -40,6 +41,7 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.LinkedHashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
@@ -51,13 +53,14 @@ public class HdfsApi {
       LoggerFactory.getLogger(HdfsApi.class);
 
   private final Configuration conf;
-  private final Map<String, String> authParams;
+  private Map<String, String> authParams;
 
   private FileSystem fs;
   private UserGroupInformation ugi;
 
   /**
    * Constructor
+   *
    * @param configurationBuilder hdfs configuration builder
    * @throws IOException
    * @throws InterruptedException
@@ -76,6 +79,38 @@ public class HdfsApi {
     });
   }
 
+  /**
+   * for testing
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws HdfsApiException
+   */
+  HdfsApi(Configuration configuration, FileSystem fs, UserGroupInformation ugi) throws IOException,
+      InterruptedException, HdfsApiException {
+    if(null != configuration){
+      conf = configuration;
+    }else {
+      conf = new Configuration();
+    }
+
+    UserGroupInformation.setConfiguration(conf);
+    if(null != ugi){
+      this.ugi = ugi;
+    }else {
+      this.ugi = UserGroupInformation.getCurrentUser();
+    }
+
+    if(null != fs){
+      this.fs = fs;
+    }else {
+      this.fs = execute(new PrivilegedExceptionAction<FileSystem>() {
+        public FileSystem run() throws IOException {
+          return FileSystem.get(conf);
+        }
+      });
+    }
+  }
+
   private UserGroupInformation getProxyUser() throws IOException {
     UserGroupInformation proxyuser;
     if (authParams.containsKey("proxyuser")) {
@@ -101,6 +136,7 @@ public class HdfsApi {
 
   /**
    * List dir operation
+   *
    * @param path path
    * @return array of FileStatus objects
    * @throws FileNotFoundException
@@ -117,7 +153,56 @@ public class HdfsApi {
   }
 
  /**
   * List dir operation with server-side filtering and truncation. Delegates
   * the raw listing to {@link #listdir(String)} and post-processes it with
   * {@code filterAndTruncateDirStatus}.
   *
   * @param path : list files and dirs in this path
   * @param nameFilter : if not empty or null, then file names that contain this are only sent.
   * @param maxAllowedSize : maximum number of files sent in output. -1 means infinite.
   * @return the filtered and truncated file statuses together with listing metadata
   * @throws FileNotFoundException
   * @throws IOException
   * @throws InterruptedException
   */
  public DirStatus listdir(final String path, final String nameFilter, int maxAllowedSize) throws FileNotFoundException,
      IOException, InterruptedException {
    FileStatus[] fileStatuses = this.listdir(path);
    return filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
  }
+
+  public DirStatus filterAndTruncateDirStatus(String nameFilter, int maxAllowedSize, FileStatus[] fileStatuses) {
+    if(null == fileStatuses){
+      return new DirStatus(null, new DirListInfo(0, false, 0, nameFilter));
+    }
+
+    int originalSize = fileStatuses.length;
+    boolean truncated = false;
+
+    if (!Strings.isNullOrEmpty(nameFilter)) {
+      List<FileStatus> filteredList = new LinkedList<>();
+      for(FileStatus fileStatus : fileStatuses){
+        if(maxAllowedSize >=0 && maxAllowedSize <= filteredList.size()){
+          truncated = true;
+          break;
+        }
+        if(fileStatus.getPath().getName().contains(nameFilter)){
+          filteredList.add(fileStatus);
+        }
+      }
+      fileStatuses = filteredList.toArray(new FileStatus[0]);
+    }
+
+    if(maxAllowedSize >=0 && fileStatuses.length > maxAllowedSize) { // in cases where name filter loop is not executed.
+      truncated = true;
+      fileStatuses = Arrays.copyOf(fileStatuses, maxAllowedSize);
+    }
+
+    int finalSize = fileStatuses.length;
+
+    return new DirStatus(fileStatuses, new DirListInfo(originalSize, truncated, finalSize, nameFilter));
+  }
+
+  /**
    * Get file status
+   *
    * @param path path
    * @return file status
    * @throws IOException
@@ -135,6 +220,7 @@ public class HdfsApi {
 
   /**
    * Make directory
+   *
    * @param path path
    * @return success
    * @throws IOException
@@ -151,6 +237,7 @@ public class HdfsApi {
 
   /**
    * Rename
+   *
    * @param src source path
    * @param dst destination path
    * @return success
@@ -168,6 +255,7 @@ public class HdfsApi {
 
   /**
    * Check is trash enabled
+   *
    * @return true if trash is enabled
    * @throws Exception
    */
@@ -182,6 +270,7 @@ public class HdfsApi {
 
   /**
    * Home directory
+   *
    * @return home directory
    * @throws Exception
    */
@@ -195,6 +284,7 @@ public class HdfsApi {
 
   /**
    * Hdfs Status
+   *
    * @return home directory
    * @throws Exception
    */
@@ -208,6 +298,7 @@ public class HdfsApi {
 
   /**
    * Trash directory
+   *
    * @return trash directory
    * @throws Exception
    */
@@ -236,7 +327,7 @@ public class HdfsApi {
   /**
    * Trash directory path.
    *
-   * @param    filePath        the path to the file
+   * @param filePath the path to the file
    * @return trash directory path for the file
    * @throws Exception
    */
@@ -251,6 +342,7 @@ public class HdfsApi {
 
   /**
    * Empty trash
+   *
    * @return
    * @throws Exception
    */
@@ -266,6 +358,7 @@ public class HdfsApi {
 
   /**
    * Move to trash
+   *
    * @param path path
    * @return success
    * @throws IOException
@@ -282,7 +375,8 @@ public class HdfsApi {
 
   /**
    * Delete
-   * @param path path
+   *
+   * @param path      path
    * @param recursive delete recursive
    * @return success
    * @throws IOException
@@ -299,7 +393,8 @@ public class HdfsApi {
 
   /**
    * Create file
-   * @param path path
+   *
+   * @param path      path
    * @param overwrite overwrite existent file
    * @return output stream
    * @throws IOException
@@ -316,6 +411,7 @@ public class HdfsApi {
 
   /**
    * Open file
+   *
    * @param path path
    * @return input stream
    * @throws IOException
@@ -332,7 +428,8 @@ public class HdfsApi {
 
   /**
    * Change permissions
-   * @param path path
+   *
+   * @param path        path
    * @param permissions permissions in format rwxrwxrwx
    * @throws IOException
    * @throws InterruptedException
@@ -353,7 +450,8 @@ public class HdfsApi {
 
   /**
    * Copy file
-   * @param src source path
+   *
+   * @param src  source path
    * @param dest destination path
    * @throws java.io.IOException
    * @throws InterruptedException
@@ -380,8 +478,9 @@ public class HdfsApi {
 
   /**
    * Executes action on HDFS using doAs
+   *
    * @param action strategy object
-   * @param <T> result type
+   * @param <T>    result type
    * @return result of operation
    * @throws IOException
    * @throws InterruptedException
@@ -419,10 +518,9 @@ public class HdfsApi {
    * Converts a Hadoop permission into a Unix permission symbolic representation
    * (i.e. -rwxr--r--) or default if the permission is NULL.
    *
-   * @param p
-   *          Hadoop permission.
+   * @param p Hadoop permission.
    * @return the Unix permission symbolic representation or default if the
-   *         permission is NULL.
+   * permission is NULL.
    */
   private static String permissionToString(FsPermission p) {
     return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
@@ -435,8 +533,7 @@ public class HdfsApi {
    * specified URL.
    * <p/>
    *
-   * @param status
-   *          Hadoop file status.
+   * @param status Hadoop file status.
    * @return The JSON representation of the file status.
    */
   public Map<String, Object> fileStatusToJSON(FileStatus status) {
@@ -465,8 +562,7 @@ public class HdfsApi {
    * specified URL.
    * <p/>
    *
-   * @param status
-   *          Hadoop file status array.
+   * @param status Hadoop file status array.
    * @return The JSON representation of the file status array.
    */
   @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/HdfsApiTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/HdfsApiTest.java b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/HdfsApiTest.java
new file mode 100644
index 0000000..e7a6752
--- /dev/null
+++ b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/HdfsApiTest.java
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+
+public class HdfsApiTest {
+  private FileSystem fs;
+  private HdfsApi hdfsApi;
+  private Configuration conf;
+  private MiniDFSCluster hdfsCluster;
+
+  @Before
+  public void setup() throws IOException, HdfsApiException, InterruptedException {
+    File baseDir = new File("./target/hdfs/" + "HdfsApiTest.filterAndTruncateDirStatus").getAbsoluteFile();
+    FileUtil.fullyDelete(baseDir);
+
+    conf = new Configuration();
+    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
+    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+    hdfsCluster = builder.build();
+    String hdfsURI = hdfsCluster.getURI() + "/";
+    conf.set("webhdfs.url", hdfsURI);
+    conf.set("fs.defaultFS", hdfsURI);
+    fs = FileSystem.get(conf);
+    hdfsApi = new HdfsApi(conf, fs, null);
+
+  }
+
+  @After
+  public void tearDown(){
+    hdfsCluster.shutdown();
+  }
+
+  @Test
+  public void filterAndTruncateDirStatus() throws Exception {
+    {
+      // null fileStatuses
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus("", 0, null);
+      Assert.assertEquals(new DirStatus(null, new DirListInfo(0, false, 0, "")), dirStatus);
+    }
+
+    {
+      FileStatus[] fileStatuses = getFileStatuses(10);
+      DirStatus dirStatus1 = hdfsApi.filterAndTruncateDirStatus("", 0, fileStatuses);
+      Assert.assertEquals(new DirStatus(new FileStatus[0], new DirListInfo(10, true, 0, "")), dirStatus1);
+    }
+
+    {
+      int originalSize = 10;
+      int maxAllowedSize = 5;
+      String nameFilter = "";
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus2 = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+      Assert.assertEquals(new DirStatus(Arrays.copyOf(fileStatuses, maxAllowedSize), new DirListInfo(originalSize, true, maxAllowedSize, nameFilter)), dirStatus2);
+    }
+
+    {
+      int originalSize = 10;
+      int maxAllowedSize = 10;
+      String nameFilter = "";
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus2 = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+      Assert.assertEquals(new DirStatus(Arrays.copyOf(fileStatuses, maxAllowedSize), new DirListInfo(originalSize, false, maxAllowedSize, nameFilter)), dirStatus2);
+    }
+
+    {
+      int originalSize = 11;
+      int maxAllowedSize = 2;
+      String nameFilter = "1";
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+      Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[1], fileStatuses[10]}, new DirListInfo(originalSize, false, 2, nameFilter)), dirStatus);
+    }
+
+    {
+      int originalSize = 20;
+      int maxAllowedSize = 3;
+      String nameFilter = "1";
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+      Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[1], fileStatuses[10], fileStatuses[11]}, new DirListInfo(originalSize, true, 3, nameFilter)), dirStatus);
+    }
+
+    {
+      int originalSize = 12;
+      int maxAllowedSize = 3;
+      String nameFilter = "1";
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+      Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[1], fileStatuses[10], fileStatuses[11]}, new DirListInfo(originalSize, false, 3, nameFilter)), dirStatus);
+    }
+
+    {
+      int originalSize = 13;
+      int maxAllowedSize = 3;
+      String nameFilter = "1";
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+      Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[1], fileStatuses[10], fileStatuses[11]}, new DirListInfo(originalSize, true, 3, nameFilter)), dirStatus);
+    }
+
+    {
+      int originalSize = 0;
+      int maxAllowedSize = 3;
+      String nameFilter = "1";
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+      Assert.assertEquals(new DirStatus(new FileStatus[0], new DirListInfo(originalSize, false, originalSize, nameFilter)), dirStatus);
+    }
+
+    {
+      int originalSize = 20;
+      int maxAllowedSize = 3;
+      String nameFilter = "";
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+      Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[0], fileStatuses[1], fileStatuses[2]}, new DirListInfo(originalSize, true, maxAllowedSize, nameFilter)), dirStatus);
+    }
+
+    {
+      int originalSize = 20;
+      int maxAllowedSize = 3;
+      String nameFilter = null;
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+      Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[0], fileStatuses[1], fileStatuses[2]}, new DirListInfo(originalSize, true, maxAllowedSize, nameFilter)), dirStatus);
+    }
+
+    {
+      int originalSize = 3;
+      int maxAllowedSize = 3;
+      String nameFilter = null;
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+      Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[0], fileStatuses[1], fileStatuses[2]}, new DirListInfo(originalSize, false, maxAllowedSize, nameFilter)), dirStatus);
+    }
+
+    {
+      int originalSize = 20;
+      int maxAllowedSize = 3;
+      String nameFilter = "a";
+      FileStatus[] fileStatuses = getFileStatuses(originalSize);
+      DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+      Assert.assertEquals(new DirStatus(new FileStatus[0], new DirListInfo(originalSize, false, 0, nameFilter)), dirStatus);
+    }
+
+  }
+
+  private FileStatus[] getFileStatuses(int numberOfFiles) {
+    FileStatus[] fileStatuses = new FileStatus[numberOfFiles];
+    for(int i = 0 ; i < numberOfFiles; i++){
+      fileStatuses[i] = getFileStatus("/"+i);
+    }
+
+    return fileStatuses;
+  }
+
+  private FileStatus getFileStatus(String path) {
+    return new FileStatus(10, false, 3, 1000, 10000, new Path(path));
+  }
+
+}
\ No newline at end of file