Posted to commits@ozone.apache.org by sa...@apache.org on 2022/05/25 02:24:00 UTC

[ozone] branch HDDS-3630 updated: HDDS-6792: [Merge RocksDB in Datanode] Fix issues reported by sonar. (#3446)

This is an automated email from the ASF dual-hosted git repository.

sammichen pushed a commit to branch HDDS-3630
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/HDDS-3630 by this push:
     new 48525d793d HDDS-6792: [Merge RocksDB in Datanode] Fix issues reported by sonar. (#3446)
48525d793d is described below

commit 48525d793d33592bcc25cb66d367fa759ea1975e
Author: Sammi Chen <sa...@apache.org>
AuthorDate: Wed May 25 10:23:55 2022 +0800

    HDDS-6792: [Merge RocksDB in Datanode] Fix issues reported by sonar. (#3446)
---
 .../upgrade/TestDatanodeUpgradeToSchemaV3.java     |  2 +-
 .../hadoop/hdds/utils/db/DumpFileLoader.java       |  8 ++++-
 .../hadoop/hdds/utils/db/RDBSstFileLoader.java     | 15 +++++++--
 .../hadoop/hdds/utils/db/RDBSstFileWriter.java     | 37 +++++++++++++++-------
 .../org/apache/hadoop/hdds/utils/db/RDBTable.java  |  5 +--
 5 files changed, 49 insertions(+), 18 deletions(-)

diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToSchemaV3.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToSchemaV3.java
index 9082166b6f..a7e61c5a9f 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToSchemaV3.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToSchemaV3.java
@@ -155,7 +155,7 @@ public class TestDatanodeUpgradeToSchemaV3 {
 
     // RocksDB loaded when SchemaV3 is enabled
     if (VersionedDatanodeFeatures.SchemaV3.isFinalizedAndEnabled(conf)) {
-      Assert.assertNotNull(dataVolume.getDbParentDir().getAbsolutePath()
+      Assert.assertTrue(dataVolume.getDbParentDir().getAbsolutePath()
           .startsWith(dataVolume.getStorageDir().toString()));
     } else {
       // RocksDB is not loaded when SchemaV3 is disabled.
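
For context on the sonar finding behind this test change: Assert.assertNotNull applied to the result of startsWith() autoboxes the primitive boolean into a Boolean that can never be null, so the old assertion could not fail even when the DB directory sat outside the storage dir. A minimal, hypothetical JUnit 4 sketch (class name and paths are invented for illustration, not taken from the patch):

    import org.junit.Assert;
    import org.junit.Test;

    /** Hypothetical test, not part of the patch. */
    public class AssertNotNullVsAssertTrueTest {
      @Test
      public void autoboxedBooleanIsNeverNull() {
        boolean goodLayout = "/data/vol1/db".startsWith("/data/vol1");
        boolean badLayout = "/other/db".startsWith("/data/vol1");
        // assertNotNull autoboxes the primitive boolean, so it passes for
        // both values and can never catch a wrong directory layout.
        Assert.assertNotNull(goodLayout);
        Assert.assertNotNull(badLayout);
        // assertTrue evaluates the condition itself: it passes here and
        // would fail if the path were outside the storage dir.
        Assert.assertTrue(goodLayout);
      }
    }
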
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DumpFileLoader.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DumpFileLoader.java
index 9355ba7399..16655cc54d 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DumpFileLoader.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DumpFileLoader.java
@@ -17,16 +17,22 @@
  */
 package org.apache.hadoop.hdds.utils.db;
 
+import java.io.Closeable;
 import java.io.File;
 import java.io.IOException;
 
 /**
  * Interface for loading data from a dump file.
  */
-public interface DumpFileLoader {
+public interface DumpFileLoader extends Closeable {
 
   /**
    * Load key value pairs from an external dump file.
    */
   void load(File externalFile) throws IOException;
+
+  /**
+   * Close this file loader.
+   */
+  void close();
 }
\ No newline at end of file
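
The interface now extends java.io.Closeable but redeclares close() without the IOException in Closeable's signature, which Java permits and which lets callers (such as the try-with-resources block added to RDBTable below) close a loader without handling a checked exception. A hypothetical, minimal implementation just to illustrate the contract (the class is invented, not part of the patch):

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.hdds.utils.db.DumpFileLoader;

    /** Hypothetical implementation, only to illustrate the contract. */
    public class NoOpDumpFileLoader implements DumpFileLoader {
      @Override
      public void load(File externalFile) throws IOException {
        // Nothing to load in this sketch.
      }

      @Override
      public void close() {
        // Narrower than Closeable.close() throws IOException; callers may
        // close without catching a checked exception.
      }
    }
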
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBSstFileLoader.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBSstFileLoader.java
index f1d4149b83..4e6c85b38d 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBSstFileLoader.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBSstFileLoader.java
@@ -22,6 +22,7 @@ import org.rocksdb.IngestExternalFileOptions;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksDBException;
 
+import java.io.Closeable;
 import java.io.File;
 import java.io.IOException;
 import java.util.Collections;
@@ -31,11 +32,11 @@ import static org.apache.hadoop.hdds.utils.HddsServerUtil.toIOException;
 /**
  * DumpFileLoader using rocksdb sst files.
  */
-public class RDBSstFileLoader implements DumpFileLoader {
+public class RDBSstFileLoader implements DumpFileLoader, Closeable {
 
   private final RocksDB db;
   private final ColumnFamilyHandle handle;
-  private final IngestExternalFileOptions ingestOptions;
+  private IngestExternalFileOptions ingestOptions;
 
 
   public RDBSstFileLoader(RocksDB db, ColumnFamilyHandle handle) {
@@ -60,6 +61,16 @@ public class RDBSstFileLoader implements DumpFileLoader {
       throw toIOException("Failed to ingest external file "
           + externalFile.getAbsolutePath() + ", ingestBehind:"
           + ingestOptions.ingestBehind(), e);
+    } finally {
+      close();
+    }
+  }
+
+  @Override
+  public void close() {
+    if (ingestOptions != null) {
+      ingestOptions.close();
+      ingestOptions = null;
     }
   }
 }
\ No newline at end of file
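
For readers unfamiliar with the RocksDB bulk-load path: RDBSstFileLoader wraps RocksDB.ingestExternalFile(), and IngestExternalFileOptions is a native object whose off-heap memory is only released by close(), which is why the patch now closes it in a finally block and nulls the field so close() stays safe to call again. A rough standalone sketch of the underlying API, using made-up paths and the default column family rather than the Ozone code itself:

    import java.util.Collections;

    import org.rocksdb.IngestExternalFileOptions;
    import org.rocksdb.Options;
    import org.rocksdb.RocksDB;
    import org.rocksdb.RocksDBException;

    /** Hypothetical standalone example, not the Ozone code. */
    public final class SstIngestSketch {
      public static void main(String[] args) throws RocksDBException {
        RocksDB.loadLibrary();
        try (Options options = new Options().setCreateIfMissing(true);
             RocksDB db = RocksDB.open(options, "/tmp/sketch-db");
             IngestExternalFileOptions ingest =
                 new IngestExternalFileOptions()) {
          // Bulk-load a pre-built sst file into the default column family;
          // RDBSstFileLoader does the same against a specific handle and
          // then releases the native options object in its close().
          db.ingestExternalFile(
              Collections.singletonList("/tmp/dump.sst"), ingest);
        }
      }
    }
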
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBSstFileWriter.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBSstFileWriter.java
index a568632f91..8614babe6c 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBSstFileWriter.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBSstFileWriter.java
@@ -22,6 +22,7 @@ import org.rocksdb.Options;
 import org.rocksdb.RocksDBException;
 import org.rocksdb.SstFileWriter;
 
+import java.io.Closeable;
 import java.io.File;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicLong;
@@ -31,9 +32,9 @@ import static org.apache.hadoop.hdds.utils.HddsServerUtil.toIOException;
 /**
  * DumpFileWriter using rocksdb sst files.
  */
-public class RDBSstFileWriter implements DumpFileWriter {
+public class RDBSstFileWriter implements DumpFileWriter, Closeable {
 
-  private final SstFileWriter sstFileWriter;
+  private SstFileWriter sstFileWriter;
   private File sstFile;
   private AtomicLong keyCounter;
 
@@ -51,6 +52,7 @@ public class RDBSstFileWriter implements DumpFileWriter {
       // Here will create a new sst file each time, not append to existing
       sstFileWriter.open(sstFile.getAbsolutePath());
     } catch (RocksDBException e) {
+      closeOnFailure();
       throw toIOException("Failed to open external file for dump "
           + sstFile.getAbsolutePath(), e);
     }
@@ -62,6 +64,7 @@ public class RDBSstFileWriter implements DumpFileWriter {
       sstFileWriter.put(key, value);
       keyCounter.incrementAndGet();
     } catch (RocksDBException e) {
+      closeOnFailure();
       throw toIOException("Failed to put kv into dump file "
           + sstFile.getAbsolutePath(), e);
     }
@@ -69,18 +72,28 @@ public class RDBSstFileWriter implements DumpFileWriter {
 
   @Override
   public void close() throws IOException {
-    try {
-      // We should check for empty sst file, or we'll get exception.
-      if (keyCounter.get() > 0) {
-        sstFileWriter.finish();
+    if (sstFileWriter != null) {
+      try {
+        // We should check for empty sst file, or we'll get exception.
+        if (keyCounter.get() > 0) {
+          sstFileWriter.finish();
+        }
+      } catch (RocksDBException e) {
+        throw toIOException("Failed to finish dumping into file "
+            + sstFile.getAbsolutePath(), e);
+      } finally {
+        sstFileWriter.close();
+        sstFileWriter = null;
       }
-    } catch (RocksDBException e) {
-      throw toIOException("Failed to finish dumping into file "
-          + sstFile.getAbsolutePath(), e);
-    } finally {
-      sstFileWriter.close();
+
+      keyCounter.set(0);
     }
+  }
 
-    keyCounter.set(0);
+  private void closeOnFailure() {
+    if (sstFileWriter != null) {
+      sstFileWriter.close();
+      sstFileWriter = null;
+    }
   }
 }
\ No newline at end of file
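
Similarly, SstFileWriter holds a native handle, so the new closeOnFailure() releases it as soon as open() or put() throws instead of waiting for a later close() that might never come. A rough standalone sketch of the writer API being wrapped (class name and paths are invented, not the Ozone class):

    import java.nio.charset.StandardCharsets;

    import org.rocksdb.EnvOptions;
    import org.rocksdb.Options;
    import org.rocksdb.RocksDB;
    import org.rocksdb.RocksDBException;
    import org.rocksdb.SstFileWriter;

    /** Hypothetical standalone example, not the Ozone class. */
    public final class SstWriteSketch {
      public static void main(String[] args) throws RocksDBException {
        RocksDB.loadLibrary();
        try (EnvOptions envOptions = new EnvOptions();
             Options options = new Options();
             SstFileWriter writer = new SstFileWriter(envOptions, options)) {
          writer.open("/tmp/dump.sst");
          // Keys must be added in ascending order, and finish() throws on
          // an empty file -- the constraint RDBSstFileWriter guards with
          // its keyCounter check.
          writer.put("key1".getBytes(StandardCharsets.UTF_8),
              "value1".getBytes(StandardCharsets.UTF_8));
          writer.finish();
        }
      }
    }
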
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java
index 3767fa7474..2f7c956ea5 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java
@@ -286,8 +286,9 @@ class RDBTable implements Table<byte[], byte[]> {
 
   @Override
   public void loadFromFile(File externalFile) throws IOException {
-    DumpFileLoader fileLoader = new RDBSstFileLoader(db, handle);
-    fileLoader.load(externalFile);
+    try (DumpFileLoader fileLoader = new RDBSstFileLoader(db, handle)) {
+      fileLoader.load(externalFile);
+    }
   }
 
   private List<ByteArrayKeyValue> getRangeKVs(byte[] startKey,
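
Since DumpFileLoader now extends Closeable, the try-with-resources block added above behaves roughly like the explicit try/finally below (leaving aside suppressed-exception bookkeeping), which is what guarantees the loader is closed even when load() throws. A sketch of the equivalent form only, reusing the same variables as the method above:

    DumpFileLoader fileLoader = new RDBSstFileLoader(db, handle);
    try {
      fileLoader.load(externalFile);
    } finally {
      // Runs on both the normal and the exception path, releasing the
      // native IngestExternalFileOptions held by the loader.
      fileLoader.close();
    }
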


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@ozone.apache.org
For additional commands, e-mail: commits-help@ozone.apache.org