You are viewing a plain text version of this content. The canonical link to the original (HTML) message was present in the source page but is not preserved in this plain-text extraction.
Posted to commits@hbase.apache.org by ap...@apache.org on 2019/06/03 18:34:06 UTC
[hbase] branch branch-1.4 updated: HBASE-22509 Address
findbugs/spotbugs complaints (branch-1.4) (#277)
This is an automated email from the ASF dual-hosted git repository.
apurtell pushed a commit to branch branch-1.4
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-1.4 by this push:
new 81efa17 HBASE-22509 Address findbugs/spotbugs complaints (branch-1.4) (#277)
81efa17 is described below
commit 81efa17489efb14e4e32da43c422c6b31d2fa4e1
Author: Andrew Purtell <ap...@apache.org>
AuthorDate: Mon Jun 3 11:34:00 2019 -0700
HBASE-22509 Address findbugs/spotbugs complaints (branch-1.4) (#277)
Signed-off-by: Sean Busbey <bu...@apache.org>
---
.../apache/hadoop/hbase/io/encoding/EncodedDataBlock.java | 7 ++++++-
.../org/apache/hadoop/metrics2/lib/MetricsExecutorImpl.java | 2 +-
.../java/org/apache/hadoop/hbase/regionserver/HRegion.java | 13 ++++++++++---
.../java/org/apache/hadoop/hbase/regionserver/HStore.java | 6 +++++-
4 files changed, 22 insertions(+), 6 deletions(-)
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
index 192c84d..c4638cc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
@@ -255,7 +255,7 @@ public class EncodedDataBlock {
}
BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream();
baos.writeTo(stream);
- this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.ourBytes);
+ this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.toByteArray());
} catch (IOException e) {
throw new RuntimeException(String.format(
"Bug in encoding part of algorithm %s. " +
@@ -272,6 +272,11 @@ public class EncodedDataBlock {
public synchronized void write(byte[] b, int off, int len) {
this.ourBytes = b;
}
+
+ @Override
+ public synchronized byte[] toByteArray() {
+ return ourBytes;
+ }
}
@Override
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MetricsExecutorImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MetricsExecutorImpl.java
index c381609..0a83a5e 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MetricsExecutorImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/MetricsExecutorImpl.java
@@ -48,7 +48,7 @@ public class MetricsExecutorImpl implements MetricsExecutor {
private enum ExecutorSingleton {
INSTANCE;
- private final ScheduledExecutorService scheduler = new ScheduledThreadPoolExecutor(1,
+ private transient final ScheduledExecutorService scheduler = new ScheduledThreadPoolExecutor(1,
new ThreadPoolExecutorThreadFactory("HBase-Metrics2-"));
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index f0f1b96..6e2fb19 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -7963,7 +7963,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
for (Map.Entry<Store, List<Cell>> entry: removedCellsForMemStore.entrySet()) {
entry.getKey().add(entry.getValue());
}
- if (we != null) mvcc.complete(we);
+ if (we != null) {
+ mvcc.complete(we);
+ }
} else if (we != null) {
mvcc.completeAndWait(we);
}
@@ -8184,6 +8186,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
rowLock.release();
}
// if the wal sync was unsuccessful, remove keys from memstore
+ WriteEntry we = walKey != null ? walKey.getWriteEntry() : null;
if (doRollBackMemstore) {
for (Map.Entry<Store, List<Cell>> entry: forMemStore.entrySet()) {
rollbackMemstore(entry.getKey(), entry.getValue());
@@ -8191,9 +8194,13 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
for (Map.Entry<Store, List<Cell>> entry: removedCellsForMemStore.entrySet()) {
entry.getKey().add(entry.getValue());
}
- if (walKey != null) mvcc.complete(walKey.getWriteEntry());
+ if (we != null) {
+ mvcc.complete(we);
+ }
} else {
- if (walKey != null) mvcc.completeAndWait(walKey.getWriteEntry());
+ if (we != null) {
+ mvcc.completeAndWait(we);
+ }
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 5dbe936..40a5b64 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -2836,10 +2836,14 @@ public class HStore implements Store {
// Just close and return
filesToRemove.add(file);
} else {
- LOG.info("Can't archive compacted file " + file.getPath()
+ if (r != null) {
+ LOG.info("Can't archive compacted file " + file.getPath()
+ " because of either isCompactedAway=" + r.isCompactedAway()
+ " or file has reference, isReferencedInReads=" + r.isReferencedInReads()
+ ", refCount=" + r.getRefCount() + ", skipping for now.");
+ } else {
+ LOG.info("Can't archive compacted file " + file.getPath() + ", skipping for now.");
+ }
}
} catch (Exception e) {
LOG.error(