Posted to commits@hbase.apache.org by st...@apache.org on 2017/07/06 20:14:07 UTC

[52/53] [abbrv] hbase git commit: HBASE-18002 Investigate why bucket cache filling up in file mode in an existing file is slower (Ram)

HBASE-18002 Investigate why bucket cache filling up in file mode in an
existing file is slower (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/50bb0457
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/50bb0457
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/50bb0457

Branch: refs/heads/HBASE-14070.HLC
Commit: 50bb04572371e68726a9176cdcb0668e3fd74feb
Parents: df93c13
Author: Ramkrishna <ra...@intel.com>
Authored: Thu Jul 6 11:20:00 2017 +0530
Committer: Ramkrishna <ra...@intel.com>
Committed: Thu Jul 6 11:20:00 2017 +0530

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/bucket/BucketCache.java    |  7 ++++---
 .../hadoop/hbase/io/hfile/bucket/FileIOEngine.java   | 15 ++++++++++++++-
 .../hbase/io/hfile/bucket/TestFileIOEngine.java      |  2 +-
 3 files changed, 19 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
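In brief: the persistence path is now threaded from BucketCache down to FileIOEngine, so the engine knows whether its backing files must be kept for cache recovery. When no persistence path is configured, any pre-existing cache file is deleted before the engine opens it, so the bucket cache fills a fresh file instead of an existing one. A minimal sketch of the resulting wiring follows; the class name, paths and numeric values are illustrative, not from the commit (only the BucketCache constructor signature is taken from the diff below):

  import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;

  public class BucketCacheWiringSketch {
    public static void main(String[] args) throws Exception {
      String ioEngineName = "file:/mnt/bucketcache/bucket.cache"; // example path
      long capacity = 4L * 1024 * 1024 * 1024;                    // example: 4 GB
      String persistencePath = null;                              // persistence not requested

      // Internally: getIOEngineFromName(ioEngineName, capacity, persistencePath)
      // now builds new FileIOEngine(capacity, persistencePath != null, filePaths),
      // so with persistencePath == null a stale bucket.cache is deleted first.
      BucketCache cache = new BucketCache(ioEngineName, capacity, 65536,
          new int[] { 65536 }, 3, 64, persistencePath, 60000);
      cache.shutdown();
    }
  }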


http://git-wip-us.apache.org/repos/asf/hbase/blob/50bb0457/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 3c27f14..489c805 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -227,7 +227,7 @@ public class BucketCache implements BlockCache, HeapSize {
   public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,
       int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration)
       throws FileNotFoundException, IOException {
-    this.ioEngine = getIOEngineFromName(ioEngineName, capacity);
+    this.ioEngine = getIOEngineFromName(ioEngineName, capacity, persistencePath);
     this.writerThreads = new WriterThread[writerThreadNum];
     long blockNumCapacity = capacity / blockSize;
     if (blockNumCapacity >= Integer.MAX_VALUE) {
@@ -309,10 +309,11 @@ public class BucketCache implements BlockCache, HeapSize {
    * Get the IOEngine from the IO engine name
    * @param ioEngineName
    * @param capacity
+   * @param persistencePath
    * @return the IOEngine
    * @throws IOException
    */
-  private IOEngine getIOEngineFromName(String ioEngineName, long capacity)
+  private IOEngine getIOEngineFromName(String ioEngineName, long capacity, String persistencePath)
       throws IOException {
     if (ioEngineName.startsWith("file:") || ioEngineName.startsWith("files:")) {
       // In order to make the usage simple, we only need the prefix 'files:' in
@@ -320,7 +321,7 @@ public class BucketCache implements BlockCache, HeapSize {
       // the compatibility
       String[] filePaths = ioEngineName.substring(ioEngineName.indexOf(":") + 1)
           .split(FileIOEngine.FILE_DELIMITER);
-      return new FileIOEngine(capacity, filePaths);
+      return new FileIOEngine(capacity, persistencePath != null, filePaths);
     } else if (ioEngineName.startsWith("offheap")) {
       return new ByteBufferIOEngine(capacity, true);
     } else if (ioEngineName.startsWith("heap")) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/50bb0457/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index 7586d57..a847bfe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -52,11 +52,24 @@ public class FileIOEngine implements IOEngine {
   private FileReadAccessor readAccessor = new FileReadAccessor();
   private FileWriteAccessor writeAccessor = new FileWriteAccessor();
 
-  public FileIOEngine(long capacity, String... filePaths) throws IOException {
+  public FileIOEngine(long capacity, boolean maintainPersistence, String... filePaths)
+      throws IOException {
     this.sizePerFile = capacity / filePaths.length;
     this.capacity = this.sizePerFile * filePaths.length;
     this.filePaths = filePaths;
     this.fileChannels = new FileChannel[filePaths.length];
+    if (!maintainPersistence) {
+      for (String filePath : filePaths) {
+        File file = new File(filePath);
+        if (file.exists()) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("File " + filePath + " already exists. Deleting!!");
+          }
+          file.delete();
+          // If deletion fails still we can manage with the writes
+        }
+      }
+    }
     this.rafs = new RandomAccessFile[filePaths.length];
     for (int i = 0; i < filePaths.length; i++) {
       String filePath = filePaths[i];

http://git-wip-us.apache.org/repos/asf/hbase/blob/50bb0457/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index d1f3dfe..d13022d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -58,7 +58,7 @@ public class TestFileIOEngine {
       boundaryStopPositions.add(sizePerFile * i + 1);
     }
     boundaryStopPositions.add(sizePerFile * filePaths.length - 1);
-    FileIOEngine fileIOEngine = new FileIOEngine(totalCapacity, filePaths);
+    FileIOEngine fileIOEngine = new FileIOEngine(totalCapacity, false, filePaths);
     try {
       for (int i = 0; i < 500; i++) {
         int len = (int) Math.floor(Math.random() * 100);