You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by sy...@apache.org on 2016/02/13 21:10:21 UTC
[10/22] hbase git commit: HBASE-15223 Make convertScanToString public for Spark
HBASE-15223 Make convertScanToString public for Spark
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1942a99b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1942a99b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1942a99b
Branch: refs/heads/hbase-12439
Commit: 1942a99b831bb4c41c0e09d6b93df5e1d060f58e
Parents: abb6cdc
Author: Jerry He <je...@apache.org>
Authored: Wed Feb 10 15:02:58 2016 -0800
Committer: Jerry He <je...@apache.org>
Committed: Wed Feb 10 15:02:58 2016 -0800
----------------------------------------------------------------------
.../hadoop/hbase/mapreduce/TableInputFormatBase.java | 10 +++++-----
.../hadoop/hbase/mapreduce/TableMapReduceUtil.java | 4 ++--
.../org/apache/hadoop/hbase/mapreduce/TableSplit.java | 13 ++++++++++++-
3 files changed, 19 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/1942a99b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index 918232f..b2f115c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -266,7 +266,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
}
List<InputSplit> splits = new ArrayList<InputSplit>(1);
long regionSize = sizeCalculator.getRegionSize(regLoc.getRegionInfo().getRegionName());
- TableSplit split = new TableSplit(tableName,
+ TableSplit split = new TableSplit(tableName, scan,
HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, regLoc
.getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0], regionSize);
splits.add(split);
@@ -309,7 +309,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
byte[] regionName = location.getRegionInfo().getRegionName();
long regionSize = sizeCalculator.getRegionSize(regionName);
- TableSplit split = new TableSplit(tableName,
+ TableSplit split = new TableSplit(tableName, scan,
splitStart, splitStop, regionLocation, regionSize);
splits.add(split);
if (LOG.isDebugEnabled()) {
@@ -397,9 +397,9 @@ extends InputFormat<ImmutableBytesWritable, Result> {
byte[] splitKey = getSplitKey(ts.getStartRow(), ts.getEndRow(), isTextKey);
//Set the size of child TableSplit as 1/2 of the region size. The exact size of the
// MapReduce input splits is not far off.
- TableSplit t1 = new TableSplit(tableName, ts.getStartRow(), splitKey, regionLocation,
+ TableSplit t1 = new TableSplit(tableName, scan, ts.getStartRow(), splitKey, regionLocation,
regionSize / 2);
- TableSplit t2 = new TableSplit(tableName, splitKey, ts.getEndRow(), regionLocation,
+ TableSplit t2 = new TableSplit(tableName, scan, splitKey, ts.getEndRow(), regionLocation,
regionSize - regionSize / 2);
resultList.add(t1);
resultList.add(t2);
@@ -426,7 +426,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
break;
}
}
- TableSplit t = new TableSplit(tableName, splitStartKey, splitEndKey,
+ TableSplit t = new TableSplit(tableName, scan, splitStartKey, splitEndKey,
regionLocation, totalSize);
resultList.add(t);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/1942a99b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index d43c4d9..37e4e44 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -561,7 +561,7 @@ public class TableMapReduceUtil {
* @return The scan saved in a Base64 encoded string.
* @throws IOException When writing the scan fails.
*/
- static String convertScanToString(Scan scan) throws IOException {
+ public static String convertScanToString(Scan scan) throws IOException {
ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
return Base64.encodeBytes(proto.toByteArray());
}
@@ -573,7 +573,7 @@ public class TableMapReduceUtil {
* @return The newly created Scan instance.
* @throws IOException When reading the scan instance fails.
*/
- static Scan convertStringToScan(String base64) throws IOException {
+ public static Scan convertStringToScan(String base64) throws IOException {
byte [] decoded = Base64.decode(base64);
ClientProtos.Scan scan;
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/1942a99b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
index e8e59a2..850db81 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
@@ -301,7 +301,18 @@ implements Writable, Comparable<TableSplit> {
StringBuilder sb = new StringBuilder();
sb.append("HBase table split(");
sb.append("table name: ").append(tableName);
- sb.append(", scan: ").append(scan);
+ // null scan input is represented by ""
+ String printScan = "";
+ if (!scan.equals("")) {
+ try {
+ // get the real scan here in toString, not the Base64 string
+ printScan = TableMapReduceUtil.convertStringToScan(scan).toString();
+ }
+ catch (IOException e) {
+ printScan = "";
+ }
+ }
+ sb.append(", scan: ").append(printScan);
sb.append(", start row: ").append(Bytes.toStringBinary(startRow));
sb.append(", end row: ").append(Bytes.toStringBinary(endRow));
sb.append(", region location: ").append(regionLocation);