You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by ns...@apache.org on 2011/10/11 19:43:32 UTC
svn commit: r1181946 [1/3] - in /hbase/branches/0.89/src/main:
java/org/apache/hadoop/hbase/thrift/
java/org/apache/hadoop/hbase/thrift/generated/
resources/org/apache/hadoop/hbase/thrift/
Author: nspiegelberg
Date: Tue Oct 11 17:43:31 2011
New Revision: 1181946
URL: http://svn.apache.org/viewvc?rev=1181946&view=rev
Log:
Adding Thrift support for the Filter Language in the 89 branch
Summary:
This enables the Filter Language to be used over Thrift in the 0.89 branch
Test Plan:
- TestThriftServer.java
- TestParseFilter.java
Ran a sample client program:
echo "Testing KeyOnlyFilter";
$result = $client->scannerOpenWithFilterString("conf", "KeyOnlyFilter()");
$to_print = $client->scannerGetList($result,1);
while ($to_print) {
print_r($to_print);
$to_print = $client->scannerGetList($result,1);
}
$client->scannerClose($result);
which gave results:
Testing KeyOnlyFilterArray
(
[0] => TRowResult Object
(
[row] => realtime
[columns] => Array
(
[conf:blacklist] => TCell Object
(
[value] =>
[timestamp] => 1314048458095
)
)
)
)
Reviewed By: liyintang
Reviewers: dgoode, pkhemani, liyintang
Commenters: dgoode
CC: kranganathan, dgoode, liyintang
Differential Revision: 310417
Added:
hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java
Modified:
hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
hbase/branches/0.89/src/main/resources/org/apache/hadoop/hbase/thrift/Hbase.thrift
Modified: hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java?rev=1181946&r1=1181945&r2=1181946&view=diff
==============================================================================
--- hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java (original)
+++ hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java Tue Oct 11 17:43:31 2011
@@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.client.Sc
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
@@ -57,6 +58,7 @@ import org.apache.hadoop.hbase.thrift.ge
import org.apache.hadoop.hbase.thrift.generated.TCell;
import org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
+import org.apache.hadoop.hbase.thrift.generated.TScan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
@@ -678,6 +680,42 @@ public class ThriftServer {
}
}
+ public int scannerOpenWithScan(byte [] tableName, TScan tScan) throws IOError {
+ try {
+ HTable table = getTable(tableName);
+ Scan scan = new Scan();
+ if (tScan.isSetStartRow()) {
+ scan.setStartRow(tScan.getStartRow());
+ }
+ if (tScan.isSetStopRow()) {
+ scan.setStopRow(tScan.getStopRow());
+ }
+ if (tScan.isSetTimestamp()) {
+ scan.setTimeRange(Long.MIN_VALUE, tScan.getTimestamp());
+ }
+ if (tScan.isSetCaching()) {
+ scan.setCaching(tScan.getCaching());
+ }
+ if(tScan.isSetColumns() && tScan.getColumns().size() != 0) {
+ for(byte [] column : tScan.getColumns()) {
+ byte [][] famQf = KeyValue.parseColumn(column);
+ if(famQf.length == 1) {
+ scan.addFamily(famQf[0]);
+ } else {
+ scan.addColumn(famQf[0], famQf[1]);
+ }
+ }
+ }
+ if (tScan.isSetFilterString()) {
+ ParseFilter parseFilter = new ParseFilter();
+ scan.setFilter(parseFilter.parseFilterString(tScan.getFilterString()));
+ }
+ return addScanner(table.getScanner(scan));
+ } catch (IOException e) {
+ throw new IOError(e.getMessage());
+ }
+ }
+
public int scannerOpenTs(byte[] tableName, byte[] startRow,
List<byte[]> columns, long timestamp) throws IOError, TException {
try {
@@ -724,6 +762,50 @@ public class ThriftServer {
}
}
+ @Override
+ public int scannerOpenWithFilterString(byte [] tableName,
+ byte [] filterString) throws IOError, TException {
+ return scannerOpenWithFilterStringTs(tableName, filterString, Long.MAX_VALUE);
+ }
+
+ @Override
+ public int scannerOpenWithFilterStringTs(byte [] tableName, byte [] filterString,
+ long timestamp) throws IOError, TException {
+ return scannerOpenWithStopAndFilterStringTs(tableName,
+ HConstants.EMPTY_START_ROW,
+ HConstants.EMPTY_END_ROW,
+ filterString, timestamp);
+ }
+
+ @Override
+ public int scannerOpenWithStopAndFilterString(byte [] tableName,
+ byte [] startRow, byte [] stopRow,
+ byte [] filterString)
+ throws IOError, TException {
+ return scannerOpenWithStopAndFilterStringTs(tableName, startRow,
+ HConstants.EMPTY_END_ROW,
+ filterString, Long.MAX_VALUE);
+ }
+
+ @Override
+ public int scannerOpenWithStopAndFilterStringTs(byte [] tableName, byte [] startRow,
+ byte [] stopRow, byte [] filterString,
+ long timestamp) throws IOError, TException {
+ try {
+ HTable table = getTable(tableName);
+ Scan scan = new Scan(startRow, stopRow);
+ scan.setTimeRange(Long.MIN_VALUE, timestamp);
+
+ if (filterString != null && filterString.length != 0) {
+ ParseFilter parseFilter = new ParseFilter();
+ scan.setFilter(parseFilter.parseFilterString(filterString));
+ }
+ return addScanner(table.getScanner(scan));
+ } catch (IOException e) {
+ throw new IOError(e.getMessage());
+ }
+ }
+
public Map<byte[], ColumnDescriptor> getColumnDescriptors(
byte[] tableName) throws IOError, TException {
try {