You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by je...@apache.org on 2016/02/10 23:56:00 UTC
hbase git commit: HBASE-15223 Make convertScanToString public for Spark
Repository: hbase
Updated Branches:
refs/heads/branch-1 a34db9383 -> bc1d83673
HBASE-15223 Make convertScanToString public for Spark
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bc1d8367
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bc1d8367
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bc1d8367
Branch: refs/heads/branch-1
Commit: bc1d8367389e14724cb6d79f3b0abac69266f0b0
Parents: a34db93
Author: Jerry He <je...@apache.org>
Authored: Wed Feb 10 14:55:38 2016 -0800
Committer: Jerry He <je...@apache.org>
Committed: Wed Feb 10 14:55:38 2016 -0800
----------------------------------------------------------------------
.../hadoop/hbase/mapreduce/TableInputFormatBase.java | 14 +++++++-------
.../hadoop/hbase/mapreduce/TableMapReduceUtil.java | 4 ++--
.../org/apache/hadoop/hbase/mapreduce/TableSplit.java | 13 ++++++++++++-
.../hadoop/hbase/regionserver/RSRpcServices.java | 1 -
4 files changed, 21 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/bc1d8367/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index 82378d1..d72c177 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -264,7 +264,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
}
List<InputSplit> splits = new ArrayList<InputSplit>(1);
long regionSize = sizeCalculator.getRegionSize(regLoc.getRegionInfo().getRegionName());
- TableSplit split = new TableSplit(table.getName(),
+ TableSplit split = new TableSplit(table.getName(), scan,
HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, regLoc
.getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0], regionSize);
splits.add(split);
@@ -307,7 +307,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
byte[] regionName = location.getRegionInfo().getRegionName();
long regionSize = sizeCalculator.getRegionSize(regionName);
- TableSplit split = new TableSplit(table.getName(),
+ TableSplit split = new TableSplit(table.getName(), scan,
splitStart, splitStop, regionLocation, regionSize);
splits.add(split);
if (LOG.isDebugEnabled()) {
@@ -398,10 +398,10 @@ extends InputFormat<ImmutableBytesWritable, Result> {
byte[] splitKey = getSplitKey(ts.getStartRow(), ts.getEndRow(), isTextKey);
//Set the size of child TableSplit as 1/2 of the region size. The exact size of the
// MapReduce input splits is not far off.
- TableSplit t1 = new TableSplit(table.getName(), ts.getStartRow(), splitKey, regionLocation,
- regionSize / 2);
- TableSplit t2 = new TableSplit(table.getName(), splitKey, ts.getEndRow(), regionLocation,
- regionSize - regionSize / 2);
+ TableSplit t1 = new TableSplit(table.getName(), scan, ts.getStartRow(), splitKey,
+ regionLocation, regionSize / 2);
+ TableSplit t2 = new TableSplit(table.getName(), scan, splitKey, ts.getEndRow(),
+ regionLocation, regionSize - regionSize / 2);
resultList.add(t1);
resultList.add(t2);
count++;
@@ -427,7 +427,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
break;
}
}
- TableSplit t = new TableSplit(table.getName(), splitStartKey, splitEndKey,
+ TableSplit t = new TableSplit(table.getName(), scan, splitStartKey, splitEndKey,
regionLocation, totalSize);
resultList.add(t);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/bc1d8367/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index 8cad7ab..458464f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -552,7 +552,7 @@ public class TableMapReduceUtil {
* @return The scan saved in a Base64 encoded string.
* @throws IOException When writing the scan fails.
*/
- static String convertScanToString(Scan scan) throws IOException {
+ public static String convertScanToString(Scan scan) throws IOException {
ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
return Base64.encodeBytes(proto.toByteArray());
}
@@ -564,7 +564,7 @@ public class TableMapReduceUtil {
* @return The newly created Scan instance.
* @throws IOException When reading the scan instance fails.
*/
- static Scan convertStringToScan(String base64) throws IOException {
+ public static Scan convertStringToScan(String base64) throws IOException {
byte [] decoded = Base64.decode(base64);
ClientProtos.Scan scan;
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/bc1d8367/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
index 7111668..bf8532b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
@@ -325,7 +325,18 @@ implements Writable, Comparable<TableSplit> {
StringBuilder sb = new StringBuilder();
sb.append("HBase table split(");
sb.append("table name: ").append(tableName);
- sb.append(", scan: ").append(scan);
+ // null scan input is represented by ""
+ String printScan = "";
+ if (!scan.equals("")) {
+ try {
+ // get the real scan here in toString, not the Base64 string
+ printScan = TableMapReduceUtil.convertStringToScan(scan).toString();
+ }
+ catch (IOException e) {
+ printScan = "";
+ }
+ }
+ sb.append(", scan: ").append(printScan);
sb.append(", start row: ").append(Bytes.toStringBinary(startRow));
sb.append(", end row: ").append(Bytes.toStringBinary(endRow));
sb.append(", region location: ").append(regionLocation);
http://git-wip-us.apache.org/repos/asf/hbase/blob/bc1d8367/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index f9b2b7b..eff9060 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -2133,7 +2133,6 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
RegionActionResult.Builder regionActionResultBuilder = RegionActionResult.newBuilder();
Boolean processed = null;
- RpcCallContext context = RpcServer.getCurrentCall();
this.rpcMultiRequestCount.increment();
for (RegionAction regionAction : request.getRegionActionList()) {
this.requestCount.add(regionAction.getActionCount());