You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by la...@apache.org on 2013/07/02 14:17:26 UTC
svn commit: r1498900 - in /hbase/branches/0.94/src:
main/java/org/apache/hadoop/hbase/thrift2/
main/java/org/apache/hadoop/hbase/thrift2/generated/
main/resources/org/apache/hadoop/hbase/thrift2/
test/java/org/apache/hadoop/hbase/thrift2/
Author: larsgeorge
Date: Tue Jul 2 12:17:25 2013
New Revision: 1498900
URL: http://svn.apache.org/r1498900
Log:
HBASE-8774 Add BatchSize and Filter to Thrift2 (Hamed Madani)
Modified:
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java
hbase/branches/0.94/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift
hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java Tue Jul 2 12:17:25 2013
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.thrift2;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.thrift2.generated.*;
import java.io.IOException;
@@ -71,6 +72,11 @@ public class ThriftUtilities {
}
}
+ if (in.isSetFilterString()) {
+ ParseFilter parseFilter = new ParseFilter();
+ out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
+ }
+
return out;
}
@@ -322,6 +328,15 @@ public class ThriftUtilities {
out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp());
}
+ if (in.isSetBatchSize()) {
+ out.setBatch(in.getBatchSize());
+ }
+
+ if (in.isSetFilterString()) {
+ ParseFilter parseFilter = new ParseFilter();
+ out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
+ }
+
return out;
}
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java Tue Jul 2 12:17:25 2013
@@ -709,7 +709,7 @@ public class TDelete implements org.apac
struct.columns = new ArrayList<TColumn>(_list24.size);
for (int _i25 = 0; _i25 < _list24.size; ++_i25)
{
- TColumn _elem26; // optional
+ TColumn _elem26; // required
_elem26 = new TColumn();
_elem26.read(iprot);
struct.columns.add(_elem26);
@@ -860,7 +860,7 @@ public class TDelete implements org.apac
struct.columns = new ArrayList<TColumn>(_list29.size);
for (int _i30 = 0; _i30 < _list29.size; ++_i30)
{
- TColumn _elem31; // optional
+ TColumn _elem31; // required
_elem31 = new TColumn();
_elem31.read(iprot);
struct.columns.add(_elem31);
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java Tue Jul 2 12:17:25 2013
@@ -39,8 +39,6 @@ import org.slf4j.LoggerFactory;
*
* If you specify a time range and a timestamp the range is ignored.
* Timestamps on TColumns are ignored.
- *
- * TODO: Filter, Locks
*/
public class TGet implements org.apache.thrift.TBase<TGet, TGet._Fields>, java.io.Serializable, Cloneable {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGet");
@@ -50,6 +48,7 @@ public class TGet implements org.apache.
private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3);
private static final org.apache.thrift.protocol.TField TIME_RANGE_FIELD_DESC = new org.apache.thrift.protocol.TField("timeRange", org.apache.thrift.protocol.TType.STRUCT, (short)4);
private static final org.apache.thrift.protocol.TField MAX_VERSIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxVersions", org.apache.thrift.protocol.TType.I32, (short)5);
+ private static final org.apache.thrift.protocol.TField FILTER_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("filterString", org.apache.thrift.protocol.TType.STRING, (short)6);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
@@ -62,6 +61,7 @@ public class TGet implements org.apache.
public long timestamp; // optional
public TTimeRange timeRange; // optional
public int maxVersions; // optional
+ public ByteBuffer filterString; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -69,7 +69,8 @@ public class TGet implements org.apache.
COLUMNS((short)2, "columns"),
TIMESTAMP((short)3, "timestamp"),
TIME_RANGE((short)4, "timeRange"),
- MAX_VERSIONS((short)5, "maxVersions");
+ MAX_VERSIONS((short)5, "maxVersions"),
+ FILTER_STRING((short)6, "filterString");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
@@ -94,6 +95,8 @@ public class TGet implements org.apache.
return TIME_RANGE;
case 5: // MAX_VERSIONS
return MAX_VERSIONS;
+ case 6: // FILTER_STRING
+ return FILTER_STRING;
default:
return null;
}
@@ -137,7 +140,7 @@ public class TGet implements org.apache.
private static final int __TIMESTAMP_ISSET_ID = 0;
private static final int __MAXVERSIONS_ISSET_ID = 1;
private BitSet __isset_bit_vector = new BitSet(2);
- private _Fields optionals[] = {_Fields.COLUMNS,_Fields.TIMESTAMP,_Fields.TIME_RANGE,_Fields.MAX_VERSIONS};
+ private _Fields optionals[] = {_Fields.COLUMNS,_Fields.TIMESTAMP,_Fields.TIME_RANGE,_Fields.MAX_VERSIONS,_Fields.FILTER_STRING};
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -152,6 +155,8 @@ public class TGet implements org.apache.
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TTimeRange.class)));
tmpMap.put(_Fields.MAX_VERSIONS, new org.apache.thrift.meta_data.FieldMetaData("maxVersions", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+ tmpMap.put(_Fields.FILTER_STRING, new org.apache.thrift.meta_data.FieldMetaData("filterString", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TGet.class, metaDataMap);
}
@@ -188,6 +193,10 @@ public class TGet implements org.apache.
this.timeRange = new TTimeRange(other.timeRange);
}
this.maxVersions = other.maxVersions;
+ if (other.isSetFilterString()) {
+ this.filterString = org.apache.thrift.TBaseHelper.copyBinary(other.filterString);
+;
+ }
}
public TGet deepCopy() {
@@ -203,6 +212,7 @@ public class TGet implements org.apache.
this.timeRange = null;
setMaxVersionsIsSet(false);
this.maxVersions = 0;
+ this.filterString = null;
}
public byte[] getRow() {
@@ -348,6 +358,40 @@ public class TGet implements org.apache.
__isset_bit_vector.set(__MAXVERSIONS_ISSET_ID, value);
}
+ public byte[] getFilterString() {
+ setFilterString(org.apache.thrift.TBaseHelper.rightSize(filterString));
+ return filterString == null ? null : filterString.array();
+ }
+
+ public ByteBuffer bufferForFilterString() {
+ return filterString;
+ }
+
+ public TGet setFilterString(byte[] filterString) {
+ setFilterString(filterString == null ? (ByteBuffer)null : ByteBuffer.wrap(filterString));
+ return this;
+ }
+
+ public TGet setFilterString(ByteBuffer filterString) {
+ this.filterString = filterString;
+ return this;
+ }
+
+ public void unsetFilterString() {
+ this.filterString = null;
+ }
+
+ /** Returns true if field filterString is set (has been assigned a value) and false otherwise */
+ public boolean isSetFilterString() {
+ return this.filterString != null;
+ }
+
+ public void setFilterStringIsSet(boolean value) {
+ if (!value) {
+ this.filterString = null;
+ }
+ }
+
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case ROW:
@@ -390,6 +434,14 @@ public class TGet implements org.apache.
}
break;
+ case FILTER_STRING:
+ if (value == null) {
+ unsetFilterString();
+ } else {
+ setFilterString((ByteBuffer)value);
+ }
+ break;
+
}
}
@@ -410,6 +462,9 @@ public class TGet implements org.apache.
case MAX_VERSIONS:
return Integer.valueOf(getMaxVersions());
+ case FILTER_STRING:
+ return getFilterString();
+
}
throw new IllegalStateException();
}
@@ -431,6 +486,8 @@ public class TGet implements org.apache.
return isSetTimeRange();
case MAX_VERSIONS:
return isSetMaxVersions();
+ case FILTER_STRING:
+ return isSetFilterString();
}
throw new IllegalStateException();
}
@@ -493,6 +550,15 @@ public class TGet implements org.apache.
return false;
}
+ boolean this_present_filterString = true && this.isSetFilterString();
+ boolean that_present_filterString = true && that.isSetFilterString();
+ if (this_present_filterString || that_present_filterString) {
+ if (!(this_present_filterString && that_present_filterString))
+ return false;
+ if (!this.filterString.equals(that.filterString))
+ return false;
+ }
+
return true;
}
@@ -559,6 +625,16 @@ public class TGet implements org.apache.
return lastComparison;
}
}
+ lastComparison = Boolean.valueOf(isSetFilterString()).compareTo(typedOther.isSetFilterString());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetFilterString()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filterString, typedOther.filterString);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
return 0;
}
@@ -618,6 +694,16 @@ public class TGet implements org.apache.
sb.append(this.maxVersions);
first = false;
}
+ if (isSetFilterString()) {
+ if (!first) sb.append(", ");
+ sb.append("filterString:");
+ if (this.filterString == null) {
+ sb.append("null");
+ } else {
+ org.apache.thrift.TBaseHelper.toString(this.filterString, sb);
+ }
+ first = false;
+ }
sb.append(")");
return sb.toString();
}
@@ -680,7 +766,7 @@ public class TGet implements org.apache.
struct.columns = new ArrayList<TColumn>(_list8.size);
for (int _i9 = 0; _i9 < _list8.size; ++_i9)
{
- TColumn _elem10; // optional
+ TColumn _elem10; // required
_elem10 = new TColumn();
_elem10.read(iprot);
struct.columns.add(_elem10);
@@ -717,6 +803,14 @@ public class TGet implements org.apache.
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
+ case 6: // FILTER_STRING
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.filterString = iprot.readBinary();
+ struct.setFilterStringIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
@@ -768,6 +862,13 @@ public class TGet implements org.apache.
oprot.writeI32(struct.maxVersions);
oprot.writeFieldEnd();
}
+ if (struct.filterString != null) {
+ if (struct.isSetFilterString()) {
+ oprot.writeFieldBegin(FILTER_STRING_FIELD_DESC);
+ oprot.writeBinary(struct.filterString);
+ oprot.writeFieldEnd();
+ }
+ }
oprot.writeFieldStop();
oprot.writeStructEnd();
}
@@ -799,7 +900,10 @@ public class TGet implements org.apache.
if (struct.isSetMaxVersions()) {
optionals.set(3);
}
- oprot.writeBitSet(optionals, 4);
+ if (struct.isSetFilterString()) {
+ optionals.set(4);
+ }
+ oprot.writeBitSet(optionals, 5);
if (struct.isSetColumns()) {
{
oprot.writeI32(struct.columns.size());
@@ -818,6 +922,9 @@ public class TGet implements org.apache.
if (struct.isSetMaxVersions()) {
oprot.writeI32(struct.maxVersions);
}
+ if (struct.isSetFilterString()) {
+ oprot.writeBinary(struct.filterString);
+ }
}
@Override
@@ -825,14 +932,14 @@ public class TGet implements org.apache.
TTupleProtocol iprot = (TTupleProtocol) prot;
struct.row = iprot.readBinary();
struct.setRowIsSet(true);
- BitSet incoming = iprot.readBitSet(4);
+ BitSet incoming = iprot.readBitSet(5);
if (incoming.get(0)) {
{
org.apache.thrift.protocol.TList _list13 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
struct.columns = new ArrayList<TColumn>(_list13.size);
for (int _i14 = 0; _i14 < _list13.size; ++_i14)
{
- TColumn _elem15; // optional
+ TColumn _elem15; // required
_elem15 = new TColumn();
_elem15.read(iprot);
struct.columns.add(_elem15);
@@ -853,6 +960,10 @@ public class TGet implements org.apache.
struct.maxVersions = iprot.readI32();
struct.setMaxVersionsIsSet(true);
}
+ if (incoming.get(4)) {
+ struct.filterString = iprot.readBinary();
+ struct.setFilterStringIsSet(true);
+ }
}
}
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java Tue Jul 2 12:17:25 2013
@@ -2207,6 +2207,8 @@ public class THBaseService {
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bit_vector = new BitSet(1);
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
@@ -3704,7 +3706,7 @@ public class THBaseService {
struct.gets = new ArrayList<TGet>(_list48.size);
for (int _i49 = 0; _i49 < _list48.size; ++_i49)
{
- TGet _elem50; // optional
+ TGet _elem50; // required
_elem50 = new TGet();
_elem50.read(iprot);
struct.gets.add(_elem50);
@@ -3785,7 +3787,7 @@ public class THBaseService {
struct.gets = new ArrayList<TGet>(_list53.size);
for (int _i54 = 0; _i54 < _list53.size; ++_i54)
{
- TGet _elem55; // optional
+ TGet _elem55; // required
_elem55 = new TGet();
_elem55.read(iprot);
struct.gets.add(_elem55);
@@ -4186,7 +4188,7 @@ public class THBaseService {
struct.success = new ArrayList<TResult>(_list56.size);
for (int _i57 = 0; _i57 < _list56.size; ++_i57)
{
- TResult _elem58; // optional
+ TResult _elem58; // required
_elem58 = new TResult();
_elem58.read(iprot);
struct.success.add(_elem58);
@@ -4288,7 +4290,7 @@ public class THBaseService {
struct.success = new ArrayList<TResult>(_list61.size);
for (int _i62 = 0; _i62 < _list61.size; ++_i62)
{
- TResult _elem63; // optional
+ TResult _elem63; // required
_elem63 = new TResult();
_elem63.read(iprot);
struct.success.add(_elem63);
@@ -6447,6 +6449,8 @@ public class THBaseService {
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bit_vector = new BitSet(1);
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
@@ -7000,7 +7004,7 @@ public class THBaseService {
struct.puts = new ArrayList<TPut>(_list64.size);
for (int _i65 = 0; _i65 < _list64.size; ++_i65)
{
- TPut _elem66; // optional
+ TPut _elem66; // required
_elem66 = new TPut();
_elem66.read(iprot);
struct.puts.add(_elem66);
@@ -7081,7 +7085,7 @@ public class THBaseService {
struct.puts = new ArrayList<TPut>(_list69.size);
for (int _i70 = 0; _i70 < _list69.size; ++_i70)
{
- TPut _elem71; // optional
+ TPut _elem71; // required
_elem71 = new TPut();
_elem71.read(iprot);
struct.puts.add(_elem71);
@@ -8720,7 +8724,7 @@ public class THBaseService {
struct.deletes = new ArrayList<TDelete>(_list72.size);
for (int _i73 = 0; _i73 < _list72.size; ++_i73)
{
- TDelete _elem74; // optional
+ TDelete _elem74; // required
_elem74 = new TDelete();
_elem74.read(iprot);
struct.deletes.add(_elem74);
@@ -8801,7 +8805,7 @@ public class THBaseService {
struct.deletes = new ArrayList<TDelete>(_list77.size);
for (int _i78 = 0; _i78 < _list77.size; ++_i78)
{
- TDelete _elem79; // optional
+ TDelete _elem79; // required
_elem79 = new TDelete();
_elem79.read(iprot);
struct.deletes.add(_elem79);
@@ -9202,7 +9206,7 @@ public class THBaseService {
struct.success = new ArrayList<TDelete>(_list80.size);
for (int _i81 = 0; _i81 < _list80.size; ++_i81)
{
- TDelete _elem82; // optional
+ TDelete _elem82; // required
_elem82 = new TDelete();
_elem82.read(iprot);
struct.success.add(_elem82);
@@ -9304,7 +9308,7 @@ public class THBaseService {
struct.success = new ArrayList<TDelete>(_list85.size);
for (int _i86 = 0; _i86 < _list85.size; ++_i86)
{
- TDelete _elem87; // optional
+ TDelete _elem87; // required
_elem87 = new TDelete();
_elem87.read(iprot);
struct.success.add(_elem87);
@@ -10629,6 +10633,8 @@ public class THBaseService {
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bit_vector = new BitSet(1);
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
@@ -12496,6 +12502,8 @@ public class THBaseService {
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bit_vector = new BitSet(1);
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
@@ -13558,7 +13566,7 @@ public class THBaseService {
struct.success = new ArrayList<TResult>(_list88.size);
for (int _i89 = 0; _i89 < _list88.size; ++_i89)
{
- TResult _elem90; // optional
+ TResult _elem90; // required
_elem90 = new TResult();
_elem90.read(iprot);
struct.success.add(_elem90);
@@ -13680,7 +13688,7 @@ public class THBaseService {
struct.success = new ArrayList<TResult>(_list93.size);
for (int _i94 = 0; _i94 < _list93.size; ++_i94)
{
- TResult _elem95; // optional
+ TResult _elem95; // required
_elem95 = new TResult();
_elem95.read(iprot);
struct.success.add(_elem95);
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java Tue Jul 2 12:17:25 2013
@@ -527,7 +527,7 @@ public class TIncrement implements org.a
struct.columns = new ArrayList<TColumnIncrement>(_list32.size);
for (int _i33 = 0; _i33 < _list32.size; ++_i33)
{
- TColumnIncrement _elem34; // optional
+ TColumnIncrement _elem34; // required
_elem34 = new TColumnIncrement();
_elem34.read(iprot);
struct.columns.add(_elem34);
@@ -629,7 +629,7 @@ public class TIncrement implements org.a
struct.columns = new ArrayList<TColumnIncrement>(_list37.size);
for (int _i38 = 0; _i38 < _list37.size; ++_i38)
{
- TColumnIncrement _elem39; // optional
+ TColumnIncrement _elem39; // required
_elem39 = new TColumnIncrement();
_elem39.read(iprot);
struct.columns.add(_elem39);
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java Tue Jul 2 12:17:25 2013
@@ -604,7 +604,7 @@ public class TPut implements org.apache.
struct.columnValues = new ArrayList<TColumnValue>(_list16.size);
for (int _i17 = 0; _i17 < _list16.size; ++_i17)
{
- TColumnValue _elem18; // optional
+ TColumnValue _elem18; // required
_elem18 = new TColumnValue();
_elem18.read(iprot);
struct.columnValues.add(_elem18);
@@ -725,7 +725,7 @@ public class TPut implements org.apache.
struct.columnValues = new ArrayList<TColumnValue>(_list21.size);
for (int _i22 = 0; _i22 < _list21.size; ++_i22)
{
- TColumnValue _elem23; // optional
+ TColumnValue _elem23; // required
_elem23 = new TColumnValue();
_elem23.read(iprot);
struct.columnValues.add(_elem23);
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java Tue Jul 2 12:17:25 2013
@@ -442,7 +442,7 @@ public class TResult implements org.apac
struct.columnValues = new ArrayList<TColumnValue>(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
- TColumnValue _elem2; // optional
+ TColumnValue _elem2; // required
_elem2 = new TColumnValue();
_elem2.read(iprot);
struct.columnValues.add(_elem2);
@@ -530,7 +530,7 @@ public class TResult implements org.apac
struct.columnValues = new ArrayList<TColumnValue>(_list5.size);
for (int _i6 = 0; _i6 < _list5.size; ++_i6)
{
- TColumnValue _elem7; // optional
+ TColumnValue _elem7; // required
_elem7 = new TColumnValue();
_elem7.read(iprot);
struct.columnValues.add(_elem7);
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java Tue Jul 2 12:17:25 2013
@@ -40,6 +40,8 @@ public class TScan implements org.apache
private static final org.apache.thrift.protocol.TField CACHING_FIELD_DESC = new org.apache.thrift.protocol.TField("caching", org.apache.thrift.protocol.TType.I32, (short)4);
private static final org.apache.thrift.protocol.TField MAX_VERSIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxVersions", org.apache.thrift.protocol.TType.I32, (short)5);
private static final org.apache.thrift.protocol.TField TIME_RANGE_FIELD_DESC = new org.apache.thrift.protocol.TField("timeRange", org.apache.thrift.protocol.TType.STRUCT, (short)6);
+ private static final org.apache.thrift.protocol.TField FILTER_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("filterString", org.apache.thrift.protocol.TType.STRING, (short)7);
+ private static final org.apache.thrift.protocol.TField BATCH_SIZE_FIELD_DESC = new org.apache.thrift.protocol.TField("batchSize", org.apache.thrift.protocol.TType.I32, (short)8);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
@@ -53,6 +55,8 @@ public class TScan implements org.apache
public int caching; // optional
public int maxVersions; // optional
public TTimeRange timeRange; // optional
+ public ByteBuffer filterString; // optional
+ public int batchSize; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -61,7 +65,9 @@ public class TScan implements org.apache
COLUMNS((short)3, "columns"),
CACHING((short)4, "caching"),
MAX_VERSIONS((short)5, "maxVersions"),
- TIME_RANGE((short)6, "timeRange");
+ TIME_RANGE((short)6, "timeRange"),
+ FILTER_STRING((short)7, "filterString"),
+ BATCH_SIZE((short)8, "batchSize");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
@@ -88,6 +94,10 @@ public class TScan implements org.apache
return MAX_VERSIONS;
case 6: // TIME_RANGE
return TIME_RANGE;
+ case 7: // FILTER_STRING
+ return FILTER_STRING;
+ case 8: // BATCH_SIZE
+ return BATCH_SIZE;
default:
return null;
}
@@ -130,8 +140,9 @@ public class TScan implements org.apache
// isset id assignments
private static final int __CACHING_ISSET_ID = 0;
private static final int __MAXVERSIONS_ISSET_ID = 1;
- private BitSet __isset_bit_vector = new BitSet(2);
- private _Fields optionals[] = {_Fields.START_ROW,_Fields.STOP_ROW,_Fields.COLUMNS,_Fields.CACHING,_Fields.MAX_VERSIONS,_Fields.TIME_RANGE};
+ private static final int __BATCHSIZE_ISSET_ID = 2;
+ private BitSet __isset_bit_vector = new BitSet(3);
+ private _Fields optionals[] = {_Fields.START_ROW,_Fields.STOP_ROW,_Fields.COLUMNS,_Fields.CACHING,_Fields.MAX_VERSIONS,_Fields.TIME_RANGE,_Fields.FILTER_STRING,_Fields.BATCH_SIZE};
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -148,6 +159,10 @@ public class TScan implements org.apache
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.TIME_RANGE, new org.apache.thrift.meta_data.FieldMetaData("timeRange", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TTimeRange.class)));
+ tmpMap.put(_Fields.FILTER_STRING, new org.apache.thrift.meta_data.FieldMetaData("filterString", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)));
+ tmpMap.put(_Fields.BATCH_SIZE, new org.apache.thrift.meta_data.FieldMetaData("batchSize", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TScan.class, metaDataMap);
}
@@ -183,6 +198,11 @@ public class TScan implements org.apache
if (other.isSetTimeRange()) {
this.timeRange = new TTimeRange(other.timeRange);
}
+ if (other.isSetFilterString()) {
+ this.filterString = org.apache.thrift.TBaseHelper.copyBinary(other.filterString);
+;
+ }
+ this.batchSize = other.batchSize;
}
public TScan deepCopy() {
@@ -199,6 +219,9 @@ public class TScan implements org.apache
this.maxVersions = 1;
this.timeRange = null;
+ this.filterString = null;
+ setBatchSizeIsSet(false);
+ this.batchSize = 0;
}
public byte[] getStartRow() {
@@ -378,6 +401,63 @@ public class TScan implements org.apache
}
}
+ public byte[] getFilterString() {
+ setFilterString(org.apache.thrift.TBaseHelper.rightSize(filterString));
+ return filterString == null ? null : filterString.array();
+ }
+
+ public ByteBuffer bufferForFilterString() {
+ return filterString;
+ }
+
+ public TScan setFilterString(byte[] filterString) {
+ setFilterString(filterString == null ? (ByteBuffer)null : ByteBuffer.wrap(filterString));
+ return this;
+ }
+
+ public TScan setFilterString(ByteBuffer filterString) {
+ this.filterString = filterString;
+ return this;
+ }
+
+ public void unsetFilterString() {
+ this.filterString = null;
+ }
+
+ /** Returns true if field filterString is set (has been assigned a value) and false otherwise */
+ public boolean isSetFilterString() {
+ return this.filterString != null;
+ }
+
+ public void setFilterStringIsSet(boolean value) {
+ if (!value) {
+ this.filterString = null;
+ }
+ }
+
+ public int getBatchSize() {
+ return this.batchSize;
+ }
+
+ public TScan setBatchSize(int batchSize) {
+ this.batchSize = batchSize;
+ setBatchSizeIsSet(true);
+ return this;
+ }
+
+ public void unsetBatchSize() {
+ __isset_bit_vector.clear(__BATCHSIZE_ISSET_ID);
+ }
+
+ /** Returns true if field batchSize is set (has been assigned a value) and false otherwise */
+ public boolean isSetBatchSize() {
+ return __isset_bit_vector.get(__BATCHSIZE_ISSET_ID);
+ }
+
+ public void setBatchSizeIsSet(boolean value) {
+ __isset_bit_vector.set(__BATCHSIZE_ISSET_ID, value);
+ }
+
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case START_ROW:
@@ -428,6 +508,22 @@ public class TScan implements org.apache
}
break;
+ case FILTER_STRING:
+ if (value == null) {
+ unsetFilterString();
+ } else {
+ setFilterString((ByteBuffer)value);
+ }
+ break;
+
+ case BATCH_SIZE:
+ if (value == null) {
+ unsetBatchSize();
+ } else {
+ setBatchSize((Integer)value);
+ }
+ break;
+
}
}
@@ -451,6 +547,12 @@ public class TScan implements org.apache
case TIME_RANGE:
return getTimeRange();
+ case FILTER_STRING:
+ return getFilterString();
+
+ case BATCH_SIZE:
+ return Integer.valueOf(getBatchSize());
+
}
throw new IllegalStateException();
}
@@ -474,6 +576,10 @@ public class TScan implements org.apache
return isSetMaxVersions();
case TIME_RANGE:
return isSetTimeRange();
+ case FILTER_STRING:
+ return isSetFilterString();
+ case BATCH_SIZE:
+ return isSetBatchSize();
}
throw new IllegalStateException();
}
@@ -545,6 +651,24 @@ public class TScan implements org.apache
return false;
}
+ boolean this_present_filterString = true && this.isSetFilterString();
+ boolean that_present_filterString = true && that.isSetFilterString();
+ if (this_present_filterString || that_present_filterString) {
+ if (!(this_present_filterString && that_present_filterString))
+ return false;
+ if (!this.filterString.equals(that.filterString))
+ return false;
+ }
+
+ boolean this_present_batchSize = true && this.isSetBatchSize();
+ boolean that_present_batchSize = true && that.isSetBatchSize();
+ if (this_present_batchSize || that_present_batchSize) {
+ if (!(this_present_batchSize && that_present_batchSize))
+ return false;
+ if (this.batchSize != that.batchSize)
+ return false;
+ }
+
return true;
}
@@ -621,6 +745,26 @@ public class TScan implements org.apache
return lastComparison;
}
}
+ lastComparison = Boolean.valueOf(isSetFilterString()).compareTo(typedOther.isSetFilterString());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetFilterString()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filterString, typedOther.filterString);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetBatchSize()).compareTo(typedOther.isSetBatchSize());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetBatchSize()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.batchSize, typedOther.batchSize);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
return 0;
}
@@ -692,6 +836,22 @@ public class TScan implements org.apache
}
first = false;
}
+ if (isSetFilterString()) {
+ if (!first) sb.append(", ");
+ sb.append("filterString:");
+ if (this.filterString == null) {
+ sb.append("null");
+ } else {
+ org.apache.thrift.TBaseHelper.toString(this.filterString, sb);
+ }
+ first = false;
+ }
+ if (isSetBatchSize()) {
+ if (!first) sb.append(", ");
+ sb.append("batchSize:");
+ sb.append(this.batchSize);
+ first = false;
+ }
sb.append(")");
return sb.toString();
}
@@ -759,7 +919,7 @@ public class TScan implements org.apache
struct.columns = new ArrayList<TColumn>(_list40.size);
for (int _i41 = 0; _i41 < _list40.size; ++_i41)
{
- TColumn _elem42; // optional
+ TColumn _elem42; // required
_elem42 = new TColumn();
_elem42.read(iprot);
struct.columns.add(_elem42);
@@ -796,6 +956,22 @@ public class TScan implements org.apache
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
+ case 7: // FILTER_STRING
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.filterString = iprot.readBinary();
+ struct.setFilterStringIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 8: // BATCH_SIZE
+ if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+ struct.batchSize = iprot.readI32();
+ struct.setBatchSizeIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
@@ -856,6 +1032,18 @@ public class TScan implements org.apache
oprot.writeFieldEnd();
}
}
+ if (struct.filterString != null) {
+ if (struct.isSetFilterString()) {
+ oprot.writeFieldBegin(FILTER_STRING_FIELD_DESC);
+ oprot.writeBinary(struct.filterString);
+ oprot.writeFieldEnd();
+ }
+ }
+ if (struct.isSetBatchSize()) {
+ oprot.writeFieldBegin(BATCH_SIZE_FIELD_DESC);
+ oprot.writeI32(struct.batchSize);
+ oprot.writeFieldEnd();
+ }
oprot.writeFieldStop();
oprot.writeStructEnd();
}
@@ -892,7 +1080,13 @@ public class TScan implements org.apache
if (struct.isSetTimeRange()) {
optionals.set(5);
}
- oprot.writeBitSet(optionals, 6);
+ if (struct.isSetFilterString()) {
+ optionals.set(6);
+ }
+ if (struct.isSetBatchSize()) {
+ optionals.set(7);
+ }
+ oprot.writeBitSet(optionals, 8);
if (struct.isSetStartRow()) {
oprot.writeBinary(struct.startRow);
}
@@ -917,12 +1111,18 @@ public class TScan implements org.apache
if (struct.isSetTimeRange()) {
struct.timeRange.write(oprot);
}
+ if (struct.isSetFilterString()) {
+ oprot.writeBinary(struct.filterString);
+ }
+ if (struct.isSetBatchSize()) {
+ oprot.writeI32(struct.batchSize);
+ }
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, TScan struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
- BitSet incoming = iprot.readBitSet(6);
+ BitSet incoming = iprot.readBitSet(8);
if (incoming.get(0)) {
struct.startRow = iprot.readBinary();
struct.setStartRowIsSet(true);
@@ -937,7 +1137,7 @@ public class TScan implements org.apache
struct.columns = new ArrayList<TColumn>(_list45.size);
for (int _i46 = 0; _i46 < _list45.size; ++_i46)
{
- TColumn _elem47; // optional
+ TColumn _elem47; // required
_elem47 = new TColumn();
_elem47.read(iprot);
struct.columns.add(_elem47);
@@ -958,6 +1158,14 @@ public class TScan implements org.apache
struct.timeRange.read(iprot);
struct.setTimeRangeIsSet(true);
}
+ if (incoming.get(6)) {
+ struct.filterString = iprot.readBinary();
+ struct.setFilterStringIsSet(true);
+ }
+ if (incoming.get(7)) {
+ struct.batchSize = iprot.readI32();
+ struct.setBatchSizeIsSet(true);
+ }
}
}
Modified: hbase/branches/0.94/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift (original)
+++ hbase/branches/0.94/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift Tue Jul 2 12:17:25 2013
@@ -89,8 +89,6 @@ enum TDeleteType {
*
* If you specify a time range and a timestamp the range is ignored.
* Timestamps on TColumns are ignored.
- *
- * TODO: Filter, Locks
*/
struct TGet {
1: required binary row,
@@ -100,6 +98,7 @@ struct TGet {
4: optional TTimeRange timeRange,
5: optional i32 maxVersions,
+ 6: optional binary filterString
}
/**
@@ -174,6 +173,8 @@ struct TScan {
4: optional i32 caching,
5: optional i32 maxVersions=1,
6: optional TTimeRange timeRange,
+ 7: optional binary filterString,
+ 8: optional i32 batchSize
}
//
Modified: hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java?rev=1498900&r1=1498899&r2=1498900&view=diff
==============================================================================
--- hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java (original)
+++ hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java Tue Jul 2 12:17:25 2013
@@ -504,6 +504,8 @@ public class TestThriftHBaseServiceHandl
columns.add(column);
scan.setColumns(columns);
scan.setStartRow("testScan".getBytes());
+ // apply KeyOnlyFilter: results keep row keys/qualifiers but strip cell values (values come back empty)
+ scan.setFilterString(ByteBuffer.wrap(("KeyOnlyFilter()").getBytes()));
TColumnValue columnValue = new TColumnValue(ByteBuffer.wrap(familyAname), ByteBuffer.wrap(qualifierAname),
ByteBuffer.wrap(valueAname));
@@ -519,6 +521,7 @@ public class TestThriftHBaseServiceHandl
assertEquals(10, results.size());
for (int i = 0; i < 10; i++) {
assertArrayEquals(("testScan" + i).getBytes(), results.get(i).getRow());
+ assertArrayEquals(("").getBytes(), results.get(i).getColumnValues().get(0).getValue());
}
results = handler.getScannerRows(scanId, 10);