You are viewing a plain text version of this content. The canonical version of this message, with working links, is available in the Apache mailing list archives.
Posted to commits@hbase.apache.org by ap...@apache.org on 2016/06/27 01:55:37 UTC
[1/9] hbase git commit: HBASE-15946 Eliminate possible security
concerns in RS web UI's store file metrics (Sean Mackrory)
Repository: hbase
Updated Branches:
refs/heads/0.98 4a37af5db -> 41f6f7e30
HBASE-15946 Eliminate possible security concerns in RS web UI's store file metrics (Sean Mackrory)
Conflicts:
hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
hbase-server/src/main/resources/hbase-webapps/rest/rest.jsp
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/947e74ef
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/947e74ef
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/947e74ef
Branch: refs/heads/0.98
Commit: 947e74efa7eb92eec9b8b02cf2c73d5391be0ab1
Parents: 4a37af5
Author: Sean Mackrory <ma...@apache.org>
Authored: Tue May 31 10:28:27 2016 -0600
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Jun 24 10:37:35 2016 -0700
----------------------------------------------------------------------
.../hbase/io/hfile/HFilePrettyPrinter.java | 115 ++++++++++++-------
.../main/resources/hbase-webapps/rest/rest.jsp | 64 +++++------
2 files changed, 101 insertions(+), 78 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/947e74ef/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
index d5abdc4..270b552 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
@@ -1,4 +1,3 @@
-
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
@@ -88,6 +87,9 @@ public class HFilePrettyPrinter extends Configured implements Tool {
private boolean checkFamily;
private boolean isSeekToRow = false;
+ private PrintStream out = System.out;
+ private PrintStream err = System.err;
+
/**
* The row which the user wants to specify and print all the KeyValues for.
*/
@@ -131,6 +133,11 @@ public class HFilePrettyPrinter extends Configured implements Tool {
options.addOptionGroup(files);
}
+ public void setPrintStreams(PrintStream out, PrintStream err) {
+ this.out = out;
+ this.err = err;
+ }
+
public boolean parseOptions(String args[]) throws ParseException,
IOException {
if (args.length == 0) {
@@ -161,7 +168,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
row = key.getBytes();
isSeekToRow = true;
} else {
- System.err.println("Invalid row is specified.");
+ err.println("Invalid row is specified.");
System.exit(-1);
}
}
@@ -175,17 +182,17 @@ public class HFilePrettyPrinter extends Configured implements Tool {
String enc = HRegionInfo.encodeRegionName(rn);
Path regionDir = new Path(tableDir, enc);
if (verbose)
- System.out.println("region dir -> " + regionDir);
+ out.println("region dir -> " + regionDir);
List<Path> regionFiles = HFile.getStoreFiles(FileSystem.get(getConf()),
regionDir);
if (verbose)
- System.out.println("Number of region files found -> "
+ out.println("Number of region files found -> "
+ regionFiles.size());
if (verbose) {
int i = 1;
for (Path p : regionFiles) {
if (verbose)
- System.out.println("Found file[" + i++ + "] -> " + p);
+ out.println("Found file[" + i++ + "] -> " + p);
}
}
files.addAll(regionFiles);
@@ -217,27 +224,46 @@ public class HFilePrettyPrinter extends Configured implements Tool {
// iterate over all files found
for (Path fileName : files) {
try {
- processFile(fileName);
+ int exitCode = processFile(fileName);
+ if (exitCode != 0) {
+ return exitCode;
+ }
} catch (IOException ex) {
LOG.error("Error reading " + fileName, ex);
- System.exit(-2);
+ return -2;
}
}
if (verbose || printKey) {
- System.out.println("Scanned kv count -> " + count);
+ out.println("Scanned kv count -> " + count);
}
return 0;
}
- private void processFile(Path file) throws IOException {
+ public int processFile(Path file) throws IOException {
if (verbose)
- System.out.println("Scanning -> " + file);
+ out.println("Scanning -> " + file);
+
+ Path rootPath = FSUtils.getRootDir(getConf());
+ String rootString = rootPath + rootPath.SEPARATOR;
+ if (!file.toString().startsWith(rootString)) {
+ // First we see if fully-qualified URI matches the root dir. It might
+ // also be an absolute path in the same filesystem, so we prepend the FS
+ // of the root dir and see if that fully-qualified URI matches.
+ FileSystem rootFS = rootPath.getFileSystem(getConf());
+ String qualifiedFile = rootFS.getUri().toString() + file.toString();
+ if (!qualifiedFile.startsWith(rootString)) {
+ err.println("ERROR, file (" + file +
+ ") is not in HBase's root directory (" + rootString + ")");
+ return -2;
+ }
+ }
+
FileSystem fs = file.getFileSystem(getConf());
if (!fs.exists(file)) {
- System.err.println("ERROR, file doesnt exist: " + file);
- System.exit(-2);
+ err.println("ERROR, file doesnt exist: " + file);
+ return -2;
}
HFile.Reader reader = HFile.createReader(fs, file, new CacheConfig(getConf()), getConf());
@@ -268,12 +294,12 @@ public class HFilePrettyPrinter extends Configured implements Tool {
}
if (printBlockIndex) {
- System.out.println("Block Index:");
- System.out.println(reader.getDataBlockIndexReader());
+ out.println("Block Index:");
+ out.println(reader.getDataBlockIndexReader());
}
if (printBlockHeaders) {
- System.out.println("Block Headers:");
+ out.println("Block Headers:");
/*
* TODO: this same/similar block iteration logic is used in HFileBlock#blockRange and
* TestLazyDataBlockDecompression. Refactor?
@@ -289,16 +315,17 @@ public class HFilePrettyPrinter extends Configured implements Tool {
block = reader.readBlock(offset, -1, /* cacheBlock */ false, /* pread */ false,
/* isCompaction */ false, /* updateCacheMetrics */ false, null);
offset += block.getOnDiskSizeWithHeader();
- System.out.println(block);
+ out.println(block);
}
}
if (printStats) {
fileStats.finish();
- System.out.println("Stats:\n" + fileStats);
+ out.println("Stats:\n" + fileStats);
}
reader.close();
+ return 0;
}
private void scanKeysValues(Path file, KeyValueStatsCollector fileStats,
@@ -320,22 +347,24 @@ public class HFilePrettyPrinter extends Configured implements Tool {
}
// dump key value
if (printKey) {
- System.out.print("K: " + kv);
+ out.print("K: " + kv);
if (printValue) {
- System.out.print(" V: " + Bytes.toStringBinary(kv.getValue()));
+ out.print(" V: "
+ + Bytes.toStringBinary(kv.getValueArray(), kv.getValueOffset(),
+ kv.getValueLength()));
int i = 0;
List<Tag> tags = kv.getTags();
for (Tag tag : tags) {
- System.out
- .print(String.format(" T[%d]: %s", i++, Bytes.toStringBinary(tag.getValue())));
+ out.print(String.format(" T[%d]: %s", i++,
+ Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength())));
}
}
- System.out.println();
+ out.println();
}
// check if rows are in order
if (checkRow && pkv != null) {
if (Bytes.compareTo(pkv.getRow(), kv.getRow()) > 0) {
- System.err.println("WARNING, previous row is greater then"
+ err.println("WARNING, previous row is greater then"
+ " current row\n\tfilename -> " + file + "\n\tprevious -> "
+ Bytes.toStringBinary(pkv.getKey()) + "\n\tcurrent -> "
+ Bytes.toStringBinary(kv.getKey()));
@@ -345,13 +374,13 @@ public class HFilePrettyPrinter extends Configured implements Tool {
if (checkFamily) {
String fam = Bytes.toString(kv.getFamily());
if (!file.toString().contains(fam)) {
- System.err.println("WARNING, filename does not match kv family,"
+ err.println("WARNING, filename does not match kv family,"
+ "\n\tfilename -> " + file + "\n\tkeyvalue -> "
+ Bytes.toStringBinary(kv.getKey()));
}
if (pkv != null
&& !Bytes.equals(pkv.getFamily(), kv.getFamily())) {
- System.err.println("WARNING, previous kv has different family"
+ err.println("WARNING, previous kv has different family"
+ " compared to current key\n\tfilename -> " + file
+ "\n\tprevious -> " + Bytes.toStringBinary(pkv.getKey())
+ "\n\tcurrent -> " + Bytes.toStringBinary(kv.getKey()));
@@ -373,33 +402,35 @@ public class HFilePrettyPrinter extends Configured implements Tool {
private void printMeta(HFile.Reader reader, Map<byte[], byte[]> fileInfo)
throws IOException {
- System.out.println("Block index size as per heapsize: "
+ out.println("Block index size as per heapsize: "
+ reader.indexSize());
- System.out.println(asSeparateLines(reader.toString()));
- System.out.println("Trailer:\n "
+ out.println(asSeparateLines(reader.toString()));
+ out.println("Trailer:\n "
+ asSeparateLines(reader.getTrailer().toString()));
- System.out.println("Fileinfo:");
+ out.println("Fileinfo:");
for (Map.Entry<byte[], byte[]> e : fileInfo.entrySet()) {
- System.out.print(FOUR_SPACES + Bytes.toString(e.getKey()) + " = ");
+ out.print(FOUR_SPACES + Bytes.toString(e.getKey()) + " = ");
if (Bytes.compareTo(e.getKey(), Bytes.toBytes("MAX_SEQ_ID_KEY")) == 0) {
long seqid = Bytes.toLong(e.getValue());
- System.out.println(seqid);
+ out.println(seqid);
} else if (Bytes.compareTo(e.getKey(), Bytes.toBytes("TIMERANGE")) == 0) {
+
TimeRangeTracker timeRangeTracker = new TimeRangeTracker();
Writables.copyWritable(e.getValue(), timeRangeTracker);
- System.out.println(timeRangeTracker.getMin() + "...." + timeRangeTracker.getMax());
+ out.println(timeRangeTracker.getMin() + "...." + timeRangeTracker.getMax());
} else if (Bytes.compareTo(e.getKey(), FileInfo.AVG_KEY_LEN) == 0
|| Bytes.compareTo(e.getKey(), FileInfo.AVG_VALUE_LEN) == 0) {
- System.out.println(Bytes.toInt(e.getValue()));
+ out.println(Bytes.toInt(e.getValue()));
} else {
- System.out.println(Bytes.toStringBinary(e.getValue()));
+ out.println(Bytes.toStringBinary(e.getValue()));
}
}
try {
- System.out.println("Mid-key: " + Bytes.toStringBinary(reader.midkey()));
+
+ out.println("Mid-key: " + Bytes.toStringBinary(reader.midkey()));
} catch (Exception e) {
- System.out.println ("Unable to retrieve the midkey");
+ out.println ("Unable to retrieve the midkey");
}
// Printing general bloom information
@@ -408,12 +439,12 @@ public class HFilePrettyPrinter extends Configured implements Tool {
if (bloomMeta != null)
bloomFilter = BloomFilterFactory.createFromMeta(bloomMeta, reader);
- System.out.println("Bloom filter:");
+ out.println("Bloom filter:");
if (bloomFilter != null) {
- System.out.println(FOUR_SPACES + bloomFilter.toString().replaceAll(
+ out.println(FOUR_SPACES + bloomFilter.toString().replaceAll(
ByteBloomFilter.STATS_RECORD_SEP, "\n" + FOUR_SPACES));
} else {
- System.out.println(FOUR_SPACES + "Not present");
+ out.println(FOUR_SPACES + "Not present");
}
// Printing delete bloom information
@@ -422,13 +453,13 @@ public class HFilePrettyPrinter extends Configured implements Tool {
if (bloomMeta != null)
bloomFilter = BloomFilterFactory.createFromMeta(bloomMeta, reader);
- System.out.println("Delete Family Bloom filter:");
+ out.println("Delete Family Bloom filter:");
if (bloomFilter != null) {
- System.out.println(FOUR_SPACES
+ out.println(FOUR_SPACES
+ bloomFilter.toString().replaceAll(ByteBloomFilter.STATS_RECORD_SEP,
"\n" + FOUR_SPACES));
} else {
- System.out.println(FOUR_SPACES + "Not present");
+ out.println(FOUR_SPACES + "Not present");
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/947e74ef/hbase-server/src/main/resources/hbase-webapps/rest/rest.jsp
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/resources/hbase-webapps/rest/rest.jsp b/hbase-server/src/main/resources/hbase-webapps/rest/rest.jsp
index 810569c..8b49492 100644
--- a/hbase-server/src/main/resources/hbase-webapps/rest/rest.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/rest/rest.jsp
@@ -18,10 +18,15 @@
*/
--%>
<%@ page contentType="text/html;charset=UTF-8"
+ import="java.io.ByteArrayOutputStream"
+ import="java.io.PrintStream"
import="org.apache.hadoop.conf.Configuration"
+ import="org.apache.hadoop.fs.Path"
import="org.apache.hadoop.hbase.HBaseConfiguration"
- import="org.apache.hadoop.hbase.util.VersionInfo"
- import="java.util.Date"%>
+ import="org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter"
+ import="org.apache.hadoop.hbase.regionserver.HRegionServer"
+ import="org.apache.hadoop.hbase.regionserver.StoreFile"
+ %>
<%
Configuration conf = (Configuration)getServletContext().getAttribute("hbase.conf");
long startcode = conf.getLong("startcode", System.currentTimeMillis());
@@ -74,40 +79,27 @@ String listenPort = conf.get("hbase.rest.port", "8080");
<h1>RESTServer <small><%= listenPort %></small></h1>
</div>
</div>
- <div class="row">
-
- <section>
- <h2>Software Attributes</h2>
- <table id="attributes_table" class="table table-striped">
- <tr>
- <th>Attribute Name</th>
- <th>Value</th>
- <th>Description</th>
- </tr>
- <tr>
- <td>HBase Version</td>
- <td><%= VersionInfo.getVersion() %>, r<%= VersionInfo.getRevision() %></td>
- <td>HBase version and revision</td>
- </tr>
- <tr>
- <td>HBase Compiled</td>
- <td><%= VersionInfo.getDate() %>, <%= VersionInfo.getUser() %></td>
- <td>When HBase version was compiled and by whom</td>
- </tr>
- <tr>
- <td>REST Server Start Time</td>
- <td><%= new Date(startcode) %></td>
- <td>Date stamp of when this REST server was started</td>
- </tr>
- </table>
- </section>
- </div>
- <div class="row">
-
- <section>
-<a href="http://wiki.apache.org/hadoop/Hbase/Stargate">Apache HBase Wiki on REST</a>
- </section>
- </div>
+ <pre>
+<%
+ try {
+ ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+ PrintStream printerOutput = new PrintStream(byteStream);
+ HFilePrettyPrinter printer = new HFilePrettyPrinter();
+ printer.setPrintStreams(printerOutput, printerOutput);
+ printer.setConf(conf);
+ String[] options = {"-s"};
+ printer.parseOptions(options);
+ printer.processFile(new Path(storeFile));
+ String text = byteStream.toString();%>
+ <%=
+ text
+ %>
+ <%}
+ catch (Exception e) {%>
+ <%= e %>
+ <%}
+%>
+ </pre>
</div>
<script src="/static/js/jquery.min.js" type="text/javascript"></script>
<script src="/static/js/bootstrap.min.js" type="text/javascript"></script>
[9/9] hbase git commit: HBASE-16085 Add a metric for failed
compactions
Posted by ap...@apache.org.
HBASE-16085 Add a metric for failed compactions
Conflicts:
hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
Amending-Author: Andrew Purtell <ap...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/41f6f7e3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/41f6f7e3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/41f6f7e3
Branch: refs/heads/0.98
Commit: 41f6f7e30915977f178071fae2ac3b3585347222
Parents: 099b3da
Author: Gary Helmling <ga...@apache.org>
Authored: Wed Jun 22 17:08:28 2016 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Jun 24 11:36:22 2016 -0700
----------------------------------------------------------------------
.../hbase/regionserver/MetricsRegionSource.java | 2 +
.../regionserver/MetricsRegionWrapper.java | 7 ++++
.../regionserver/MetricsRegionSourceImpl.java | 3 ++
.../TestMetricsRegionSourceImpl.java | 6 +++
.../regionserver/MetricsRegionSourceImpl.java | 3 ++
.../TestMetricsRegionSourceImpl.java | 5 +++
.../hbase/regionserver/CompactSplitThread.java | 2 +
.../hadoop/hbase/regionserver/HRegion.java | 10 ++++-
.../regionserver/MetricsRegionWrapperImpl.java | 5 +++
.../regionserver/MetricsRegionWrapperStub.java | 5 +++
.../hbase/regionserver/TestCompaction.java | 43 ++++++++++++++++++++
11 files changed, 89 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
index 8abbb1f..cfe95ed 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
@@ -28,9 +28,11 @@ public interface MetricsRegionSource extends Comparable<MetricsRegionSource> {
String OPS_SAMPLE_NAME = "ops";
String SIZE_VALUE_NAME = "size";
String COMPACTIONS_COMPLETED_COUNT = "compactionsCompletedCount";
+ String COMPACTIONS_FAILED_COUNT = "compactionsFailedCount";
String NUM_BYTES_COMPACTED_COUNT = "numBytesCompactedCount";
String NUM_FILES_COMPACTED_COUNT = "numFilesCompactedCount";
String COMPACTIONS_COMPLETED_DESC = "Number of compactions that have completed.";
+ String COMPACTIONS_FAILED_DESC = "Number of compactions that have failed.";
String NUM_BYTES_COMPACTED_DESC =
"Sum of filesize on all files entering a finished, successful or aborted, compaction";
String NUM_FILES_COMPACTED_DESC =
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
index 2a3bfb7..5349b5b 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
@@ -105,6 +105,13 @@ public interface MetricsRegionWrapper {
long getNumCompactionsCompleted();
/**
+ * Returns the total number of compactions that have been reported as failed on this region.
+ * Note that a given compaction can be reported as both completed and failed if an exception
+ * is thrown in the processing after {@code HRegion.compact()}.
+ */
+ long getNumCompactionsFailed();
+
+ /**
* Get the time spent by coprocessors in this region.
*/
Map<String, DescriptiveStatistics> getCoprocessorExecutionStatistics();
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
index 358d287..31d7721 100644
--- a/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
+++ b/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
@@ -199,6 +199,9 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
mrb.addCounter(regionNamePrefix + MetricsRegionSource.COMPACTIONS_COMPLETED_COUNT,
MetricsRegionSource.COMPACTIONS_COMPLETED_DESC,
this.regionWrapper.getNumCompactionsCompleted());
+ mrb.addCounter(regionNamePrefix + MetricsRegionSource.COMPACTIONS_FAILED_COUNT,
+ MetricsRegionSource.COMPACTIONS_FAILED_DESC,
+ this.regionWrapper.getNumCompactionsFailed());
mrb.addCounter(regionNamePrefix + MetricsRegionSource.NUM_BYTES_COMPACTED_COUNT,
MetricsRegionSource.NUM_BYTES_COMPACTED_DESC,
this.regionWrapper.getNumBytesCompacted());
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-hadoop1-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop1-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java b/hbase-hadoop1-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
index aca04c7..c041ffa 100644
--- a/hbase-hadoop1-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
+++ b/hbase-hadoop1-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
@@ -144,9 +144,15 @@ public class TestMetricsRegionSourceImpl {
public long getNumCompactionsCompleted() {
return 0;
}
+
@Override
public Map<String, DescriptiveStatistics> getCoprocessorExecutionStatistics() {
return null;
}
+
+ @Override
+ public long getNumCompactionsFailed() {
+ return 0;
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
index 8bc87e0..b8a74ca 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
@@ -201,6 +201,9 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
mrb.addCounter(Interns.info(regionNamePrefix + MetricsRegionSource.NUM_BYTES_COMPACTED_COUNT,
MetricsRegionSource.NUM_BYTES_COMPACTED_DESC),
this.regionWrapper.getNumBytesCompacted());
+ mrb.addCounter(Interns.info(regionNamePrefix + MetricsRegionSource.COMPACTIONS_FAILED_COUNT,
+ MetricsRegionSource.COMPACTIONS_FAILED_DESC),
+ this.regionWrapper.getNumCompactionsFailed());
mrb.addCounter(Interns.info(regionNamePrefix + MetricsRegionSource.NUM_FILES_COMPACTED_COUNT,
MetricsRegionSource.NUM_FILES_COMPACTED_DESC),
this.regionWrapper.getNumFilesCompacted());
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
index 539e235..5d0ab82 100644
--- a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
+++ b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java
@@ -149,5 +149,10 @@ public class TestMetricsRegionSourceImpl {
public Map<String, DescriptiveStatistics> getCoprocessorExecutionStatistics() {
return null;
}
+
+ @Override
+ public long getNumCompactionsFailed() {
+ return 0;
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
index 3e73ec1..5246287 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
@@ -518,9 +518,11 @@ public class CompactSplitThread implements CompactionRequestor {
if (remoteEx != ex) {
LOG.info("Compaction failed at original callstack: " + formatStackTrace(ex));
}
+ region.reportCompactionRequestFailure();
server.checkFileSystem();
} catch (Exception ex) {
LOG.error("Compaction failed " + this, ex);
+ region.reportCompactionRequestFailure();
server.checkFileSystem();
} finally {
LOG.debug("CompactSplitThread Status: " + CompactSplitThread.this);
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 94e3851..bd2b7e3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -291,6 +291,7 @@ public class HRegion implements HeapSize { // , Writable{
// Compaction counters
final AtomicLong compactionsFinished = new AtomicLong(0L);
+ final AtomicLong compactionsFailed = new AtomicLong(0L);
final AtomicLong compactionNumFilesCompacted = new AtomicLong(0L);
final AtomicLong compactionNumBytesCompacted = new AtomicLong(0L);
@@ -5946,7 +5947,7 @@ public class HRegion implements HeapSize { // , Writable{
public static final long FIXED_OVERHEAD = ClassSize.align(
ClassSize.OBJECT +
ClassSize.ARRAY +
- 42 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
+ 43 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
(12 * Bytes.SIZEOF_LONG) +
5 * Bytes.SIZEOF_BOOLEAN);
@@ -5963,7 +5964,8 @@ public class HRegion implements HeapSize { // , Writable{
public static final long DEEP_OVERHEAD = FIXED_OVERHEAD +
ClassSize.OBJECT + // closeLock
(2 * ClassSize.ATOMIC_BOOLEAN) + // closed, closing
- (3 * ClassSize.ATOMIC_LONG) + // memStoreSize, numPutsWithoutWAL, dataInMemoryWithoutWAL
+ (4 * ClassSize.ATOMIC_LONG) + // memStoreSize, numPutsWithoutWAL, dataInMemoryWithoutWAL,
+ // compactionsFailed
(2 * ClassSize.CONCURRENT_HASHMAP) + // lockedRows, scannerReadPoints
WriteState.HEAP_SIZE + // writestate
ClassSize.CONCURRENT_SKIPLISTMAP + ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY + // stores
@@ -6570,6 +6572,10 @@ public class HRegion implements HeapSize { // , Writable{
assert newValue >= 0;
}
+ public void reportCompactionRequestFailure() {
+ compactionsFailed.incrementAndGet();
+ }
+
/**
* @return sequenceId.
*/
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
index 1cdcb0f..d66409a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
@@ -140,6 +140,11 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
}
@Override
+ public long getNumCompactionsFailed() {
+ return this.region.compactionsFailed.get();
+ }
+
+ @Override
public long getMaxStoreFileAge() {
return maxStoreFileAge;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
index c41f93a..874f4e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java
@@ -109,4 +109,9 @@ public class MetricsRegionWrapperStub implements MetricsRegionWrapper {
public Map<String, DescriptiveStatistics> getCoprocessorExecutionStatistics() {
return new HashMap<String, DescriptiveStatistics>();
}
+
+ @Override
+ public long getNumCompactionsFailed() {
+ return 0;
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/41f6f7e3/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
index 9aa0142..c2422e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
@@ -36,6 +36,7 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -297,6 +298,48 @@ public class TestCompaction {
thread.interruptIfNecessary();
}
+ @Test
+ public void testCompactionFailure() throws Exception {
+ // setup a compact/split thread on a mock server
+ HRegionServer mockServer = Mockito.mock(HRegionServer.class);
+ Mockito.when(mockServer.getConfiguration()).thenReturn(r.getBaseConf());
+ CompactSplitThread thread = new CompactSplitThread(mockServer);
+ Mockito.when(mockServer.getCompactSplitThread()).thenReturn(thread);
+
+ // setup a region/store with some files
+ Store store = r.getStore(COLUMN_FAMILY);
+ createStoreFile(r);
+ for (int i = 0; i < HStore.DEFAULT_BLOCKING_STOREFILE_COUNT - 1; i++) {
+ createStoreFile(r);
+ }
+
+ HRegion mockRegion = Mockito.spy(r);
+ Mockito.when(mockRegion.checkSplit()).thenThrow(new IndexOutOfBoundsException());
+
+ MetricsRegionWrapper metricsWrapper = new MetricsRegionWrapperImpl(r);
+
+ long preCompletedCount = metricsWrapper.getNumCompactionsCompleted();
+ long preFailedCount = metricsWrapper.getNumCompactionsFailed();
+
+ CountDownLatch latch = new CountDownLatch(1);
+ TrackableCompactionRequest request = new TrackableCompactionRequest(latch);
+ thread.requestCompaction(mockRegion, store, "test custom comapction", Store.PRIORITY_USER,
+ request, null);
+ // wait for the latch to complete.
+ latch.await(120, TimeUnit.SECONDS);
+
+ // compaction should have completed and been marked as failed due to error in split request
+ long postCompletedCount = metricsWrapper.getNumCompactionsCompleted();
+ long postFailedCount = metricsWrapper.getNumCompactionsFailed();
+
+ assertTrue("Completed count should have increased (pre=" + preCompletedCount +
+ ", post="+postCompletedCount+")",
+ postCompletedCount > preCompletedCount);
+ assertTrue("Failed count should have increased (pre=" + preFailedCount +
+ ", post=" + postFailedCount + ")",
+ postFailedCount > preFailedCount);
+ }
+
/**
* HBASE-7947: Regression test to ensure adding to the correct list in the
* {@link CompactSplitThread}
[4/9] hbase git commit: HBASE-16045 endtime argument for
VerifyReplication was incorrectly specified in usage
Posted by ap...@apache.org.
HBASE-16045 endtime argument for VerifyReplication was incorrectly specified in usage
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e18daa0c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e18daa0c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e18daa0c
Branch: refs/heads/0.98
Commit: e18daa0c9c1653c1d6c409bf990893798ce53cbf
Parents: 6a8206a
Author: tedyu <yu...@gmail.com>
Authored: Thu Jun 16 15:52:16 2016 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Jun 24 11:04:24 2016 -0700
----------------------------------------------------------------------
.../hadoop/hbase/mapreduce/replication/VerifyReplication.java | 6 +++++-
src/main/asciidoc/_chapters/ops_mgt.adoc | 2 +-
2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/e18daa0c/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
index 44bfdfe..a5d1ddd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
@@ -383,6 +383,10 @@ public class VerifyReplication extends Configured implements Tool {
continue;
}
+ if (cmd.startsWith("--")) {
+ printUsage("Invalid argument '" + cmd + "'");
+ }
+
if (i == args.length-2) {
peerId = cmd;
}
@@ -418,7 +422,7 @@ public class VerifyReplication extends Configured implements Tool {
System.err.println("ERROR: " + errorMsg);
}
System.err.println("Usage: verifyrep [--starttime=X]" +
- " [--stoptime=Y] [--families=A] [--row-prefixes=B] <peerid> <tablename>");
+ " [--endtime=Y] [--families=A] [--row-prefixes=B] <peerid> <tablename>");
System.err.println();
System.err.println("Options:");
System.err.println(" starttime beginning of the time range");
http://git-wip-us.apache.org/repos/asf/hbase/blob/e18daa0c/src/main/asciidoc/_chapters/ops_mgt.adoc
----------------------------------------------------------------------
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc b/src/main/asciidoc/_chapters/ops_mgt.adoc
index d7ac987..fa31c8c 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -1358,7 +1358,7 @@ The `VerifyReplication` MapReduce job, which is included in HBase, performs a sy
+
[source,bash]
----
-$ HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase classpath` "${HADOOP_HOME}/bin/hadoop" jar "${HBASE_HOME}/hbase-server-VERSION.jar" verifyrep --starttime=<timestamp> --stoptime=<timestamp> --families=<myFam> <ID> <tableName>
+$ HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase classpath` "${HADOOP_HOME}/bin/hadoop" jar "${HBASE_HOME}/hbase-server-VERSION.jar" verifyrep --starttime=<timestamp> --endtime=<timestamp> --families=<myFam> <ID> <tableName>
----
+
The `VerifyReplication` command prints out `GOODROWS` and `BADROWS` counters to indicate rows that did and did not replicate correctly.
[2/9] hbase git commit: HBASE-15746 Remove extra RegionCoprocessor
preClose() in RSRpcServices#closeRegion (Stephen Yuan Jiang)
Posted by ap...@apache.org.
HBASE-15746 Remove extra RegionCoprocessor preClose() in RSRpcServices#closeRegion (Stephen Yuan Jiang)
Amending-Author: Andrew Purtell <ap...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/13dee1cf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/13dee1cf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/13dee1cf
Branch: refs/heads/0.98
Commit: 13dee1cf467e66147c24a7cff158eb6dedf9f3a9
Parents: 947e74e
Author: Stephen Yuan Jiang <sy...@gmail.com>
Authored: Tue Jun 14 13:30:41 2016 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Jun 24 10:45:01 2016 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/regionserver/HRegionServer.java | 6 ------
1 file changed, 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/13dee1cf/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 9df2a2c..d238fef 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -4129,12 +4129,6 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa
}
final String encodedRegionName = ProtobufUtil.getRegionEncodedName(request.getRegion());
- // Can be null if we're calling close on a region that's not online
- final HRegion region = this.getFromOnlineRegions(encodedRegionName);
- if ((region != null) && (region .getCoprocessorHost() != null)) {
- region.getCoprocessorHost().preClose(false);
- }
-
requestCount.increment();
LOG.info("Close " + encodedRegionName + ", via zk=" + (zk ? "yes" : "no") +
", znode version=" + versionOfClosingNode + ", on " + sn);
[8/9] hbase git commit: HBASE-16090 ResultScanner is not closed in
SyncTable#finishRemainingHashRanges()
Posted by ap...@apache.org.
HBASE-16090 ResultScanner is not closed in SyncTable#finishRemainingHashRanges()
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/099b3da9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/099b3da9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/099b3da9
Branch: refs/heads/0.98
Commit: 099b3da9e0b8dd023d0ec5933a901ff9bad6d267
Parents: 9cdf8c3
Author: tedyu <yu...@gmail.com>
Authored: Thu Jun 23 07:29:34 2016 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Jun 24 11:24:57 2016 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/mapreduce/SyncTable.java | 13 ++++++++++---
1 file changed, 10 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/099b3da9/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
index 0c8f185..08ad1b0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
@@ -653,9 +653,16 @@ public class SyncTable extends Configured implements Tool {
scan.setStopRow(nextSourceKey.copyBytes());
}
- ResultScanner targetScanner = targetTable.getScanner(scan);
- for (Result row : targetScanner) {
- targetHasher.hashResult(row);
+ ResultScanner targetScanner = null;
+ try {
+ targetScanner = targetTable.getScanner(scan);
+ for (Result row : targetScanner) {
+ targetHasher.hashResult(row);
+ }
+ } finally {
+ if (targetScanner != null) {
+ targetScanner.close();
+ }
}
} // else current batch ends exactly at split end row
[6/9] hbase git commit: HBASE-15870 Specify columns in REST multi
gets (Matt Warhaftig)
Posted by ap...@apache.org.
HBASE-15870 Specify columns in REST multi gets (Matt Warhaftig)
Amending-Author: Andrew Purtell <ap...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0fd4c1dc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0fd4c1dc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0fd4c1dc
Branch: refs/heads/0.98
Commit: 0fd4c1dcdc00d70820d9e816cad0a43c6b039c11
Parents: ce13b59
Author: Jerry He <je...@apache.org>
Authored: Mon Jun 20 13:51:41 2016 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Jun 24 11:17:48 2016 -0700
----------------------------------------------------------------------
.../hadoop/hbase/rest/MultiRowResource.java | 15 ++++++++-
.../apache/hadoop/hbase/rest/TableResource.java | 8 ++---
.../hadoop/hbase/rest/TestMultiRowResource.java | 34 ++++++++++++++++++++
3 files changed, 52 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/0fd4c1dc/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index 8ff3ef6..9cafe27 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -42,6 +42,7 @@ public class MultiRowResource extends ResourceBase implements Constants {
TableResource tableResource;
Integer versions = null;
+ String[] columns = null;
/**
* Constructor
@@ -50,10 +51,15 @@ public class MultiRowResource extends ResourceBase implements Constants {
* @param versions
* @throws java.io.IOException
*/
- public MultiRowResource(TableResource tableResource, String versions) throws IOException {
+ public MultiRowResource(TableResource tableResource, String versions, String columnsStr)
+ throws IOException {
super();
this.tableResource = tableResource;
+ if (columnsStr != null && !columnsStr.equals("")) {
+ this.columns = columnsStr.split(",");
+ }
+
if (versions != null) {
this.versions = Integer.valueOf(versions);
@@ -74,6 +80,13 @@ public class MultiRowResource extends ResourceBase implements Constants {
if (this.versions != null) {
rowSpec.setMaxVersions(this.versions);
}
+
+ if (this.columns != null) {
+ for (int i = 0; i < this.columns.length; i++) {
+ rowSpec.addColumn(this.columns[i].getBytes());
+ }
+ }
+
ResultGenerator generator =
ResultGenerator.fromRowSpec(this.tableResource.getName(), rowSpec, null,
!params.containsKey(NOCACHE_PARAM_NAME));
http://git-wip-us.apache.org/repos/asf/hbase/blob/0fd4c1dc/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index bfbb549..b55d0e0 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -90,10 +90,10 @@ public class TableResource extends ResourceBase {
return new SchemaResource(this);
}
- @Path("multiget")
- public MultiRowResource getMultipleRowResource(
- final @QueryParam("v") String versions) throws IOException {
- return new MultiRowResource(this, versions);
+ @Path("{multiget: multiget.*}")
+ public MultiRowResource getMultipleRowResource(final @QueryParam("v") String versions,
+ @PathParam("multiget") String path) throws IOException {
+ return new MultiRowResource(this, versions, path.replace("multiget", "").replace("/", ""));
}
@Path("{rowspec: [^*]+}")
http://git-wip-us.apache.org/repos/asf/hbase/blob/0fd4c1dc/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
index dd54366..aacd3b0 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
@@ -155,6 +155,40 @@ public class TestMultiRowResource {
}
@Test
+ public void testMultiCellGetWithColsJSON() throws IOException, JAXBException {
+ String row_5_url = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_1;
+ String row_6_url = "/" + TABLE + "/" + ROW_2 + "/" + COLUMN_2;
+
+ StringBuilder path = new StringBuilder();
+ path.append("/");
+ path.append(TABLE);
+ path.append("/multiget");
+ path.append("/" + COLUMN_1 + "," + CFB);
+ path.append("?row=");
+ path.append(ROW_1);
+ path.append("&row=");
+ path.append(ROW_2);
+
+ client.post(row_5_url, Constants.MIMETYPE_BINARY, Bytes.toBytes(VALUE_1));
+ client.post(row_6_url, Constants.MIMETYPE_BINARY, Bytes.toBytes(VALUE_2));
+
+ Response response = client.get(path.toString(), Constants.MIMETYPE_JSON);
+ assertEquals(response.getCode(), 200);
+ ObjectMapper mapper =
+ new JacksonProvider().locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
+ CellSetModel cellSet = (CellSetModel) mapper.readValue(response.getBody(), CellSetModel.class);
+ assertEquals(2, cellSet.getRows().size());
+ assertEquals(ROW_1, Bytes.toString(cellSet.getRows().get(0).getKey()));
+ assertEquals(VALUE_1, Bytes.toString(cellSet.getRows().get(0).getCells().get(0).getValue()));
+ assertEquals(ROW_2, Bytes.toString(cellSet.getRows().get(1).getKey()));
+ assertEquals(VALUE_2, Bytes.toString(cellSet.getRows().get(1).getCells().get(0).getValue()));
+
+ client.delete(row_5_url);
+ client.delete(row_6_url);
+
+ }
+
+ @Test
public void testMultiCellGetJSONNotFound() throws IOException, JAXBException {
String row_5_url = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_1;
[3/9] hbase git commit: HBASE-16033 Add more details in logging of
responseTooSlow/TooLarge
Posted by ap...@apache.org.
HBASE-16033 Add more details in logging of responseTooSlow/TooLarge
Conflicts:
hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6a8206aa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6a8206aa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6a8206aa
Branch: refs/heads/0.98
Commit: 6a8206aaf704c7f8e96776d52d5cdf8e7328b0fe
Parents: 13dee1c
Author: Yu Li <li...@apache.org>
Authored: Thu Jun 16 16:40:38 2016 +0800
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Jun 24 10:49:23 2016 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/ipc/RpcServer.java | 47 ++++----------------
1 file changed, 8 insertions(+), 39 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/6a8206aa/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 6ab976d..b3e9103 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -68,10 +68,7 @@ import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.Server;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Operation;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.exceptions.RegionMovedException;
import org.apache.hadoop.hbase.io.ByteBufferInputStream;
@@ -86,7 +83,6 @@ import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.AuthMethod;
import org.apache.hadoop.hbase.security.HBasePolicyProvider;
@@ -2220,13 +2216,9 @@ public class RpcServer implements RpcServerInterface {
if (tooSlow || tooLarge) {
// when tagging, we let TooLarge trump TooSmall to keep output simple
// note that large responses will often also be slow.
- StringBuilder buffer = new StringBuilder(256);
- buffer.append(md.getName());
- buffer.append("(");
- buffer.append(param.getClass().getName());
- buffer.append(")");
- logResponse(new Object[]{param},
- md.getName(), buffer.toString(), (tooLarge ? "TooLarge" : "TooSlow"),
+ logResponse(param,
+ md.getName(), md.getName() + "(" + param.getClass().getName() + ")",
+ (tooLarge ? "TooLarge" : "TooSlow"),
status.getClient(), startTime, processingTime, qTime,
responseSize);
}
@@ -2251,7 +2243,7 @@ public class RpcServer implements RpcServerInterface {
/**
* Logs an RPC response to the LOG file, producing valid JSON objects for
* client Operations.
- * @param params The parameters received in the call.
+ * @param param The parameters received in the call.
* @param methodName The name of the method invoked
* @param call The string representation of the call
* @param tag The tag that will be used to indicate this event in the log.
@@ -2262,7 +2254,7 @@ public class RpcServer implements RpcServerInterface {
* prior to being initiated, in ms.
* @param responseSize The size in bytes of the response buffer.
*/
- void logResponse(Object[] params, String methodName, String call, String tag,
+ void logResponse(Message param, String methodName, String call, String tag,
String clientAddress, long startTime, int processingTime, int qTime,
long responseSize)
throws IOException {
@@ -2275,32 +2267,9 @@ public class RpcServer implements RpcServerInterface {
responseInfo.put("client", clientAddress);
responseInfo.put("class", serverInstance == null? "": serverInstance.getClass().getSimpleName());
responseInfo.put("method", methodName);
- if (params.length == 2 && serverInstance instanceof HRegionServer &&
- params[0] instanceof byte[] &&
- params[1] instanceof Operation) {
- // if the slow process is a query, we want to log its table as well
- // as its own fingerprint
- TableName tableName = TableName.valueOf(
- HRegionInfo.parseRegionName((byte[]) params[0])[0]);
- responseInfo.put("table", tableName.getNameAsString());
- // annotate the response map with operation details
- responseInfo.putAll(((Operation) params[1]).toMap());
- // report to the log file
- LOG.warn("(operation" + tag + "): " +
- MAPPER.writeValueAsString(responseInfo));
- } else if (params.length == 1 && serverInstance instanceof HRegionServer &&
- params[0] instanceof Operation) {
- // annotate the response map with operation details
- responseInfo.putAll(((Operation) params[0]).toMap());
- // report to the log file
- LOG.warn("(operation" + tag + "): " +
- MAPPER.writeValueAsString(responseInfo));
- } else {
- // can't get JSON details, so just report call.toString() along with
- // a more generic tag.
- responseInfo.put("call", call);
- LOG.warn("(response" + tag + "): " + MAPPER.writeValueAsString(responseInfo));
- }
+ responseInfo.put("call", call);
+ responseInfo.put("param", ProtobufUtil.getShortTextFormat(param));
+ LOG.warn("(response" + tag + "): " + MAPPER.writeValueAsString(responseInfo));
}
/** Stops the service. No new calls will be handled after this is called. */
[7/9] hbase git commit: HBASE-16070 Mapreduce Serialization classes
do not have Interface audience (Ram)
Posted by ap...@apache.org.
HBASE-16070 Mapreduce Serialization classes do not have Interface audience
(Ram)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9cdf8c36
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9cdf8c36
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9cdf8c36
Branch: refs/heads/0.98
Commit: 9cdf8c36b71ab1a30eca6da0eecd4ee2baad3a7a
Parents: 0fd4c1d
Author: Ramkrishna <ra...@intel.com>
Authored: Thu Jun 23 10:55:54 2016 +0530
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Jun 24 11:24:33 2016 -0700
----------------------------------------------------------------------
.../apache/hadoop/hbase/mapreduce/KeyValueSerialization.java | 5 ++++-
.../apache/hadoop/hbase/mapreduce/MutationSerialization.java | 4 ++++
.../org/apache/hadoop/hbase/mapreduce/ResultSerialization.java | 4 ++++
3 files changed, 12 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/9cdf8c36/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java
index 192afd8..2b92439 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java
@@ -23,11 +23,14 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.Serializer;
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class KeyValueSerialization implements Serialization<KeyValue> {
@Override
public boolean accept(Class<?> c) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/9cdf8c36/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
index 4d200e8..ce2cf1a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java
@@ -21,6 +21,8 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
@@ -31,6 +33,8 @@ import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.Serializer;
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class MutationSerialization implements Serialization<Mutation> {
@Override
public boolean accept(Class<?> c) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/9cdf8c36/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
index 19b12c5..a267ca9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
@@ -31,6 +31,8 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
@@ -39,6 +41,8 @@ import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.Serializer;
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class ResultSerialization extends Configured implements Serialization<Result> {
private static final Log LOG = LogFactory.getLog(ResultSerialization.class);
// The following configuration property indicates import file format version.
[5/9] hbase git commit: HBASE-16061 Allow logging to a buffered
console
Posted by ap...@apache.org.
HBASE-16061 Allow logging to a buffered console
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ce13b595
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ce13b595
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ce13b595
Branch: refs/heads/0.98
Commit: ce13b5953b27fae8a16cf6293e95d2e88c96b8bc
Parents: e18daa0
Author: Elliott Clark <ec...@apache.org>
Authored: Fri Jun 17 16:32:57 2016 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Jun 24 11:13:49 2016 -0700
----------------------------------------------------------------------
conf/log4j.properties | 5 +-
.../hadoop/hbase/AsyncConsoleAppender.java | 48 ++++++++++++++++++++
2 files changed, 52 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/ce13b595/conf/log4j.properties
----------------------------------------------------------------------
diff --git a/conf/log4j.properties b/conf/log4j.properties
index 1d7656b..6248a1b 100644
--- a/conf/log4j.properties
+++ b/conf/log4j.properties
@@ -80,13 +80,16 @@ log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
#
# console
-# Add "console" to rootlogger above if you want to use this
+# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
+log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender
+log4j.appender.asyncconsole.target=System.err
+
# Custom Logging levels
log4j.logger.org.apache.zookeeper=INFO
http://git-wip-us.apache.org/repos/asf/hbase/blob/ce13b595/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
new file mode 100644
index 0000000..338265d
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.log4j.AsyncAppender;
+import org.apache.log4j.ConsoleAppender;
+import org.apache.log4j.PatternLayout;
+
+/**
+ * Logger class that buffers before trying to log to the specified console.
+ */
+@InterfaceAudience.Private
+public class AsyncConsoleAppender extends AsyncAppender {
+ private final ConsoleAppender consoleAppender;
+
+ public AsyncConsoleAppender() {
+ super();
+ consoleAppender = new ConsoleAppender(new PatternLayout(
+ "%d{ISO8601} %-5p [%t] %c{2}: %m%n"));
+ this.addAppender(consoleAppender);
+ }
+
+ public void setTarget(String value) {
+ consoleAppender.setTarget(value);
+ }
+
+ public void activateOptions() {
+ consoleAppender.activateOptions();
+ super.activateOptions();
+ }
+
+}