You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2012/05/30 00:31:55 UTC
svn commit: r1344034 [2/3] - in /hbase/trunk/hbase-server/src:
main/java/org/apache/hadoop/hbase/thrift/
main/java/org/apache/hadoop/hbase/thrift/generated/
main/resources/org/apache/hadoop/hbase/thrift/
test/java/org/apache/hadoop/hbase/thrift/
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java?rev=1344034&r1=1344033&r2=1344034&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java Tue May 29 22:31:54 2012
@@ -301,7 +301,7 @@ public class Hbase {
*
* @param mutations list of mutation commands
*
- * @param attributes Put attributes
+ * @param attributes Mutation attributes
*/
public void mutateRow(ByteBuffer tableName, ByteBuffer row, List<Mutation> mutations, Map<ByteBuffer,ByteBuffer> attributes) throws IOError, IllegalArgument, org.apache.thrift.TException;
@@ -319,7 +319,7 @@ public class Hbase {
*
* @param timestamp timestamp
*
- * @param attributes Put attributes
+ * @param attributes Mutation attributes
*/
public void mutateRowTs(ByteBuffer tableName, ByteBuffer row, List<Mutation> mutations, long timestamp, Map<ByteBuffer,ByteBuffer> attributes) throws IOError, IllegalArgument, org.apache.thrift.TException;
@@ -333,7 +333,7 @@ public class Hbase {
*
* @param rowBatches list of row batches
*
- * @param attributes Put attributes
+ * @param attributes Mutation attributes
*/
public void mutateRows(ByteBuffer tableName, List<BatchMutation> rowBatches, Map<ByteBuffer,ByteBuffer> attributes) throws IOError, IllegalArgument, org.apache.thrift.TException;
@@ -349,7 +349,7 @@ public class Hbase {
*
* @param timestamp timestamp
*
- * @param attributes Put attributes
+ * @param attributes Mutation attributes
*/
public void mutateRowsTs(ByteBuffer tableName, List<BatchMutation> rowBatches, long timestamp, Map<ByteBuffer,ByteBuffer> attributes) throws IOError, IllegalArgument, org.apache.thrift.TException;
@@ -407,6 +407,18 @@ public class Hbase {
public void deleteAllRow(ByteBuffer tableName, ByteBuffer row, Map<ByteBuffer,ByteBuffer> attributes) throws IOError, org.apache.thrift.TException;
/**
+ * Increment a cell by the amount.
+ * Increments can be applied async if hbase.regionserver.thrift.coalesceIncrement is set to true.
+ * False is the default. Set to true if you need the extra performance and can accept some
+ * data loss if a Thrift server dies with increments still in the queue.
+ *
+ * @param increment The single increment to apply
+ */
+ public void increment(TIncrement increment) throws IOError, org.apache.thrift.TException;
+
+ public void incrementRows(List<TIncrement> increments) throws IOError, org.apache.thrift.TException;
+
+ /**
* Completely delete the row's cells marked with a timestamp
* equal-to or older than the passed timestamp.
*
@@ -666,6 +678,10 @@ public class Hbase {
public void deleteAllRow(ByteBuffer tableName, ByteBuffer row, Map<ByteBuffer,ByteBuffer> attributes, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.deleteAllRow_call> resultHandler) throws org.apache.thrift.TException;
+ public void increment(TIncrement increment, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.increment_call> resultHandler) throws org.apache.thrift.TException;
+
+ public void incrementRows(List<TIncrement> increments, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.incrementRows_call> resultHandler) throws org.apache.thrift.TException;
+
public void deleteAllRowTs(ByteBuffer tableName, ByteBuffer row, long timestamp, Map<ByteBuffer,ByteBuffer> attributes, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.deleteAllRowTs_call> resultHandler) throws org.apache.thrift.TException;
public void scannerOpenWithScan(ByteBuffer tableName, TScan scan, Map<ByteBuffer,ByteBuffer> attributes, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.scannerOpenWithScan_call> resultHandler) throws org.apache.thrift.TException;
@@ -1508,6 +1524,52 @@ public class Hbase {
return;
}
+ public void increment(TIncrement increment) throws IOError, org.apache.thrift.TException
+ {
+ send_increment(increment);
+ recv_increment();
+ }
+
+ public void send_increment(TIncrement increment) throws org.apache.thrift.TException
+ {
+ increment_args args = new increment_args();
+ args.setIncrement(increment);
+ sendBase("increment", args);
+ }
+
+ public void recv_increment() throws IOError, org.apache.thrift.TException
+ {
+ increment_result result = new increment_result();
+ receiveBase(result, "increment");
+ if (result.io != null) {
+ throw result.io;
+ }
+ return;
+ }
+
+ public void incrementRows(List<TIncrement> increments) throws IOError, org.apache.thrift.TException
+ {
+ send_incrementRows(increments);
+ recv_incrementRows();
+ }
+
+ public void send_incrementRows(List<TIncrement> increments) throws org.apache.thrift.TException
+ {
+ incrementRows_args args = new incrementRows_args();
+ args.setIncrements(increments);
+ sendBase("incrementRows", args);
+ }
+
+ public void recv_incrementRows() throws IOError, org.apache.thrift.TException
+ {
+ incrementRows_result result = new incrementRows_result();
+ receiveBase(result, "incrementRows");
+ if (result.io != null) {
+ throw result.io;
+ }
+ return;
+ }
+
public void deleteAllRowTs(ByteBuffer tableName, ByteBuffer row, long timestamp, Map<ByteBuffer,ByteBuffer> attributes) throws IOError, org.apache.thrift.TException
{
send_deleteAllRowTs(tableName, row, timestamp, attributes);
@@ -2976,6 +3038,70 @@ public class Hbase {
}
}
+ public void increment(TIncrement increment, org.apache.thrift.async.AsyncMethodCallback<increment_call> resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ increment_call method_call = new increment_call(increment, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class increment_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private TIncrement increment;
+ public increment_call(TIncrement increment, org.apache.thrift.async.AsyncMethodCallback<increment_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.increment = increment;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("increment", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ increment_args args = new increment_args();
+ args.setIncrement(increment);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws IOError, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_increment();
+ }
+ }
+
+ public void incrementRows(List<TIncrement> increments, org.apache.thrift.async.AsyncMethodCallback<incrementRows_call> resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ incrementRows_call method_call = new incrementRows_call(increments, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class incrementRows_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private List<TIncrement> increments;
+ public incrementRows_call(List<TIncrement> increments, org.apache.thrift.async.AsyncMethodCallback<incrementRows_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.increments = increments;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("incrementRows", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ incrementRows_args args = new incrementRows_args();
+ args.setIncrements(increments);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws IOError, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_incrementRows();
+ }
+ }
+
public void deleteAllRowTs(ByteBuffer tableName, ByteBuffer row, long timestamp, Map<ByteBuffer,ByteBuffer> attributes, org.apache.thrift.async.AsyncMethodCallback<deleteAllRowTs_call> resultHandler) throws org.apache.thrift.TException {
checkReady();
deleteAllRowTs_call method_call = new deleteAllRowTs_call(tableName, row, timestamp, attributes, resultHandler, this, ___protocolFactory, ___transport);
@@ -3483,6 +3609,8 @@ public class Hbase {
processMap.put("deleteAll", new deleteAll());
processMap.put("deleteAllTs", new deleteAllTs());
processMap.put("deleteAllRow", new deleteAllRow());
+ processMap.put("increment", new increment());
+ processMap.put("incrementRows", new incrementRows());
processMap.put("deleteAllRowTs", new deleteAllRowTs());
processMap.put("scannerOpenWithScan", new scannerOpenWithScan());
processMap.put("scannerOpen", new scannerOpen());
@@ -4094,6 +4222,46 @@ public class Hbase {
}
}
+ private static class increment<I extends Iface> extends org.apache.thrift.ProcessFunction<I, increment_args> {
+ public increment() {
+ super("increment");
+ }
+
+ protected increment_args getEmptyArgsInstance() {
+ return new increment_args();
+ }
+
+ protected increment_result getResult(I iface, increment_args args) throws org.apache.thrift.TException {
+ increment_result result = new increment_result();
+ try {
+ iface.increment(args.increment);
+ } catch (IOError io) {
+ result.io = io;
+ }
+ return result;
+ }
+ }
+
+ private static class incrementRows<I extends Iface> extends org.apache.thrift.ProcessFunction<I, incrementRows_args> {
+ public incrementRows() {
+ super("incrementRows");
+ }
+
+ protected incrementRows_args getEmptyArgsInstance() {
+ return new incrementRows_args();
+ }
+
+ protected incrementRows_result getResult(I iface, incrementRows_args args) throws org.apache.thrift.TException {
+ incrementRows_result result = new incrementRows_result();
+ try {
+ iface.incrementRows(args.increments);
+ } catch (IOError io) {
+ result.io = io;
+ }
+ return result;
+ }
+ }
+
private static class deleteAllRowTs<I extends Iface> extends org.apache.thrift.ProcessFunction<I, deleteAllRowTs_args> {
public deleteAllRowTs() {
super("deleteAllRowTs");
@@ -6520,6 +6688,8 @@ public class Hbase {
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bit_vector = new BitSet(1);
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
@@ -9593,7 +9763,7 @@ public class Hbase {
for (int _i35 = 0; _i35 < _map34.size; ++_i35)
{
ByteBuffer _key36; // required
- ColumnDescriptor _val37; // required
+ ColumnDescriptor _val37; // optional
_key36 = iprot.readBinary();
_val37 = new ColumnDescriptor();
_val37.read(iprot);
@@ -9699,7 +9869,7 @@ public class Hbase {
for (int _i41 = 0; _i41 < _map40.size; ++_i41)
{
ByteBuffer _key42; // required
- ColumnDescriptor _val43; // required
+ ColumnDescriptor _val43; // optional
_key42 = iprot.readBinary();
_val43 = new ColumnDescriptor();
_val43.read(iprot);
@@ -13083,7 +13253,7 @@ public class Hbase {
for (int _i61 = 0; _i61 < _map60.size; ++_i61)
{
ByteBuffer _key62; // required
- ByteBuffer _val63; // required
+ ByteBuffer _val63; // optional
_key62 = iprot.readBinary();
_val63 = iprot.readBinary();
struct.attributes.put(_key62, _val63);
@@ -13213,7 +13383,7 @@ public class Hbase {
for (int _i67 = 0; _i67 < _map66.size; ++_i67)
{
ByteBuffer _key68; // required
- ByteBuffer _val69; // required
+ ByteBuffer _val69; // optional
_key68 = iprot.readBinary();
_val69 = iprot.readBinary();
struct.attributes.put(_key68, _val69);
@@ -14484,7 +14654,7 @@ public class Hbase {
for (int _i79 = 0; _i79 < _map78.size; ++_i79)
{
ByteBuffer _key80; // required
- ByteBuffer _val81; // required
+ ByteBuffer _val81; // optional
_key80 = iprot.readBinary();
_val81 = iprot.readBinary();
struct.attributes.put(_key80, _val81);
@@ -14627,7 +14797,7 @@ public class Hbase {
for (int _i85 = 0; _i85 < _map84.size; ++_i85)
{
ByteBuffer _key86; // required
- ByteBuffer _val87; // required
+ ByteBuffer _val87; // optional
_key86 = iprot.readBinary();
_val87 = iprot.readBinary();
struct.attributes.put(_key86, _val87);
@@ -15917,6 +16087,8 @@ public class Hbase {
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bit_vector = new BitSet(1);
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
@@ -15989,7 +16161,7 @@ public class Hbase {
for (int _i97 = 0; _i97 < _map96.size; ++_i97)
{
ByteBuffer _key98; // required
- ByteBuffer _val99; // required
+ ByteBuffer _val99; // optional
_key98 = iprot.readBinary();
_val99 = iprot.readBinary();
struct.attributes.put(_key98, _val99);
@@ -16145,7 +16317,7 @@ public class Hbase {
for (int _i103 = 0; _i103 < _map102.size; ++_i103)
{
ByteBuffer _key104; // required
- ByteBuffer _val105; // required
+ ByteBuffer _val105; // optional
_key104 = iprot.readBinary();
_val105 = iprot.readBinary();
struct.attributes.put(_key104, _val105);
@@ -17211,7 +17383,7 @@ public class Hbase {
for (int _i115 = 0; _i115 < _map114.size; ++_i115)
{
ByteBuffer _key116; // required
- ByteBuffer _val117; // required
+ ByteBuffer _val117; // optional
_key116 = iprot.readBinary();
_val117 = iprot.readBinary();
struct.attributes.put(_key116, _val117);
@@ -17326,7 +17498,7 @@ public class Hbase {
for (int _i121 = 0; _i121 < _map120.size; ++_i121)
{
ByteBuffer _key122; // required
- ByteBuffer _val123; // required
+ ByteBuffer _val123; // optional
_key122 = iprot.readBinary();
_val123 = iprot.readBinary();
struct.attributes.put(_key122, _val123);
@@ -18519,7 +18691,7 @@ public class Hbase {
for (int _i136 = 0; _i136 < _map135.size; ++_i136)
{
ByteBuffer _key137; // required
- ByteBuffer _val138; // required
+ ByteBuffer _val138; // optional
_key137 = iprot.readBinary();
_val138 = iprot.readBinary();
struct.attributes.put(_key137, _val138);
@@ -18671,7 +18843,7 @@ public class Hbase {
for (int _i147 = 0; _i147 < _map146.size; ++_i147)
{
ByteBuffer _key148; // required
- ByteBuffer _val149; // required
+ ByteBuffer _val149; // optional
_key148 = iprot.readBinary();
_val149 = iprot.readBinary();
struct.attributes.put(_key148, _val149);
@@ -19835,7 +20007,7 @@ public class Hbase {
for (int _i159 = 0; _i159 < _map158.size; ++_i159)
{
ByteBuffer _key160; // required
- ByteBuffer _val161; // required
+ ByteBuffer _val161; // optional
_key160 = iprot.readBinary();
_val161 = iprot.readBinary();
struct.attributes.put(_key160, _val161);
@@ -19963,7 +20135,7 @@ public class Hbase {
for (int _i165 = 0; _i165 < _map164.size; ++_i165)
{
ByteBuffer _key166; // required
- ByteBuffer _val167; // required
+ ByteBuffer _val167; // optional
_key166 = iprot.readBinary();
_val167 = iprot.readBinary();
struct.attributes.put(_key166, _val167);
@@ -21242,7 +21414,7 @@ public class Hbase {
for (int _i180 = 0; _i180 < _map179.size; ++_i180)
{
ByteBuffer _key181; // required
- ByteBuffer _val182; // required
+ ByteBuffer _val182; // optional
_key181 = iprot.readBinary();
_val182 = iprot.readBinary();
struct.attributes.put(_key181, _val182);
@@ -21407,7 +21579,7 @@ public class Hbase {
for (int _i191 = 0; _i191 < _map190.size; ++_i191)
{
ByteBuffer _key192; // required
- ByteBuffer _val193; // required
+ ByteBuffer _val193; // optional
_key192 = iprot.readBinary();
_val193 = iprot.readBinary();
struct.attributes.put(_key192, _val193);
@@ -22493,7 +22665,7 @@ public class Hbase {
for (int _i206 = 0; _i206 < _map205.size; ++_i206)
{
ByteBuffer _key207; // required
- ByteBuffer _val208; // required
+ ByteBuffer _val208; // optional
_key207 = iprot.readBinary();
_val208 = iprot.readBinary();
struct.attributes.put(_key207, _val208);
@@ -22630,7 +22802,7 @@ public class Hbase {
for (int _i217 = 0; _i217 < _map216.size; ++_i217)
{
ByteBuffer _key218; // required
- ByteBuffer _val219; // required
+ ByteBuffer _val219; // optional
_key218 = iprot.readBinary();
_val219 = iprot.readBinary();
struct.attributes.put(_key218, _val219);
@@ -23843,7 +24015,7 @@ public class Hbase {
for (int _i235 = 0; _i235 < _map234.size; ++_i235)
{
ByteBuffer _key236; // required
- ByteBuffer _val237; // required
+ ByteBuffer _val237; // optional
_key236 = iprot.readBinary();
_val237 = iprot.readBinary();
struct.attributes.put(_key236, _val237);
@@ -24017,7 +24189,7 @@ public class Hbase {
for (int _i251 = 0; _i251 < _map250.size; ++_i251)
{
ByteBuffer _key252; // required
- ByteBuffer _val253; // required
+ ByteBuffer _val253; // optional
_key252 = iprot.readBinary();
_val253 = iprot.readBinary();
struct.attributes.put(_key252, _val253);
@@ -25201,7 +25373,7 @@ public class Hbase {
for (int _i266 = 0; _i266 < _map265.size; ++_i266)
{
ByteBuffer _key267; // required
- ByteBuffer _val268; // required
+ ByteBuffer _val268; // optional
_key267 = iprot.readBinary();
_val268 = iprot.readBinary();
struct.attributes.put(_key267, _val268);
@@ -25351,7 +25523,7 @@ public class Hbase {
for (int _i277 = 0; _i277 < _map276.size; ++_i277)
{
ByteBuffer _key278; // required
- ByteBuffer _val279; // required
+ ByteBuffer _val279; // optional
_key278 = iprot.readBinary();
_val279 = iprot.readBinary();
struct.attributes.put(_key278, _val279);
@@ -26650,7 +26822,7 @@ public class Hbase {
for (int _i295 = 0; _i295 < _map294.size; ++_i295)
{
ByteBuffer _key296; // required
- ByteBuffer _val297; // required
+ ByteBuffer _val297; // optional
_key296 = iprot.readBinary();
_val297 = iprot.readBinary();
struct.attributes.put(_key296, _val297);
@@ -26837,7 +27009,7 @@ public class Hbase {
for (int _i311 = 0; _i311 < _map310.size; ++_i311)
{
ByteBuffer _key312; // required
- ByteBuffer _val313; // required
+ ByteBuffer _val313; // optional
_key312 = iprot.readBinary();
_val313 = iprot.readBinary();
struct.attributes.put(_key312, _val313);
@@ -27386,7 +27558,7 @@ public class Hbase {
*/
public List<Mutation> mutations; // required
/**
- * Put attributes
+ * Mutation attributes
*/
public Map<ByteBuffer,ByteBuffer> attributes; // required
@@ -27405,7 +27577,7 @@ public class Hbase {
*/
MUTATIONS((short)3, "mutations"),
/**
- * Put attributes
+ * Mutation attributes
*/
ATTRIBUTES((short)4, "attributes");
@@ -27687,14 +27859,14 @@ public class Hbase {
}
/**
- * Put attributes
+ * Mutation attributes
*/
public Map<ByteBuffer,ByteBuffer> getAttributes() {
return this.attributes;
}
/**
- * Put attributes
+ * Mutation attributes
*/
public mutateRow_args setAttributes(Map<ByteBuffer,ByteBuffer> attributes) {
this.attributes = attributes;
@@ -28031,7 +28203,7 @@ public class Hbase {
for (int _i326 = 0; _i326 < _map325.size; ++_i326)
{
ByteBuffer _key327; // required
- ByteBuffer _val328; // required
+ ByteBuffer _val328; // optional
_key327 = iprot.readBinary();
_val328 = iprot.readBinary();
struct.attributes.put(_key327, _val328);
@@ -28184,7 +28356,7 @@ public class Hbase {
for (int _i337 = 0; _i337 < _map336.size; ++_i337)
{
ByteBuffer _key338; // required
- ByteBuffer _val339; // required
+ ByteBuffer _val339; // optional
_key338 = iprot.readBinary();
_val339 = iprot.readBinary();
struct.attributes.put(_key338, _val339);
@@ -28686,7 +28858,7 @@ public class Hbase {
*/
public long timestamp; // required
/**
- * Put attributes
+ * Mutation attributes
*/
public Map<ByteBuffer,ByteBuffer> attributes; // required
@@ -28709,7 +28881,7 @@ public class Hbase {
*/
TIMESTAMP((short)4, "timestamp"),
/**
- * Put attributes
+ * Mutation attributes
*/
ATTRIBUTES((short)5, "attributes");
@@ -29034,14 +29206,14 @@ public class Hbase {
}
/**
- * Put attributes
+ * Mutation attributes
*/
public Map<ByteBuffer,ByteBuffer> getAttributes() {
return this.attributes;
}
/**
- * Put attributes
+ * Mutation attributes
*/
public mutateRowTs_args setAttributes(Map<ByteBuffer,ByteBuffer> attributes) {
this.attributes = attributes;
@@ -29424,7 +29596,7 @@ public class Hbase {
for (int _i344 = 0; _i344 < _map343.size; ++_i344)
{
ByteBuffer _key345; // required
- ByteBuffer _val346; // required
+ ByteBuffer _val346; // optional
_key345 = iprot.readBinary();
_val346 = iprot.readBinary();
struct.attributes.put(_key345, _val346);
@@ -29590,7 +29762,7 @@ public class Hbase {
for (int _i355 = 0; _i355 < _map354.size; ++_i355)
{
ByteBuffer _key356; // required
- ByteBuffer _val357; // required
+ ByteBuffer _val357; // optional
_key356 = iprot.readBinary();
_val357 = iprot.readBinary();
struct.attributes.put(_key356, _val357);
@@ -30082,7 +30254,7 @@ public class Hbase {
*/
public List<BatchMutation> rowBatches; // required
/**
- * Put attributes
+ * Mutation attributes
*/
public Map<ByteBuffer,ByteBuffer> attributes; // required
@@ -30097,7 +30269,7 @@ public class Hbase {
*/
ROW_BATCHES((short)2, "rowBatches"),
/**
- * Put attributes
+ * Mutation attributes
*/
ATTRIBUTES((short)3, "attributes");
@@ -30329,14 +30501,14 @@ public class Hbase {
}
/**
- * Put attributes
+ * Mutation attributes
*/
public Map<ByteBuffer,ByteBuffer> getAttributes() {
return this.attributes;
}
/**
- * Put attributes
+ * Mutation attributes
*/
public mutateRows_args setAttributes(Map<ByteBuffer,ByteBuffer> attributes) {
this.attributes = attributes;
@@ -30625,7 +30797,7 @@ public class Hbase {
for (int _i362 = 0; _i362 < _map361.size; ++_i362)
{
ByteBuffer _key363; // required
- ByteBuffer _val364; // required
+ ByteBuffer _val364; // optional
_key363 = iprot.readBinary();
_val364 = iprot.readBinary();
struct.attributes.put(_key363, _val364);
@@ -30763,7 +30935,7 @@ public class Hbase {
for (int _i373 = 0; _i373 < _map372.size; ++_i373)
{
ByteBuffer _key374; // required
- ByteBuffer _val375; // required
+ ByteBuffer _val375; // optional
_key374 = iprot.readBinary();
_val375 = iprot.readBinary();
struct.attributes.put(_key374, _val375);
@@ -31260,7 +31432,7 @@ public class Hbase {
*/
public long timestamp; // required
/**
- * Put attributes
+ * Mutation attributes
*/
public Map<ByteBuffer,ByteBuffer> attributes; // required
@@ -31279,7 +31451,7 @@ public class Hbase {
*/
TIMESTAMP((short)3, "timestamp"),
/**
- * Put attributes
+ * Mutation attributes
*/
ATTRIBUTES((short)4, "attributes");
@@ -31554,14 +31726,14 @@ public class Hbase {
}
/**
- * Put attributes
+ * Mutation attributes
*/
public Map<ByteBuffer,ByteBuffer> getAttributes() {
return this.attributes;
}
/**
- * Put attributes
+ * Mutation attributes
*/
public mutateRowsTs_args setAttributes(Map<ByteBuffer,ByteBuffer> attributes) {
this.attributes = attributes;
@@ -31894,7 +32066,7 @@ public class Hbase {
for (int _i380 = 0; _i380 < _map379.size; ++_i380)
{
ByteBuffer _key381; // required
- ByteBuffer _val382; // required
+ ByteBuffer _val382; // optional
_key381 = iprot.readBinary();
_val382 = iprot.readBinary();
struct.attributes.put(_key381, _val382);
@@ -32045,7 +32217,7 @@ public class Hbase {
for (int _i391 = 0; _i391 < _map390.size; ++_i391)
{
ByteBuffer _key392; // required
- ByteBuffer _val393; // required
+ ByteBuffer _val393; // optional
_key392 = iprot.readBinary();
_val393 = iprot.readBinary();
struct.attributes.put(_key392, _val393);
@@ -33659,6 +33831,8 @@ public class Hbase {
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bit_vector = new BitSet(1);
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
@@ -34450,7 +34624,7 @@ public class Hbase {
for (int _i395 = 0; _i395 < _map394.size; ++_i395)
{
ByteBuffer _key396; // required
- ByteBuffer _val397; // required
+ ByteBuffer _val397; // optional
_key396 = iprot.readBinary();
_val397 = iprot.readBinary();
struct.attributes.put(_key396, _val397);
@@ -34580,7 +34754,7 @@ public class Hbase {
for (int _i401 = 0; _i401 < _map400.size; ++_i401)
{
ByteBuffer _key402; // required
- ByteBuffer _val403; // required
+ ByteBuffer _val403; // optional
_key402 = iprot.readBinary();
_val403 = iprot.readBinary();
struct.attributes.put(_key402, _val403);
@@ -35697,7 +35871,7 @@ public class Hbase {
for (int _i405 = 0; _i405 < _map404.size; ++_i405)
{
ByteBuffer _key406; // required
- ByteBuffer _val407; // required
+ ByteBuffer _val407; // optional
_key406 = iprot.readBinary();
_val407 = iprot.readBinary();
struct.attributes.put(_key406, _val407);
@@ -35840,7 +36014,7 @@ public class Hbase {
for (int _i411 = 0; _i411 < _map410.size; ++_i411)
{
ByteBuffer _key412; // required
- ByteBuffer _val413; // required
+ ByteBuffer _val413; // optional
_key412 = iprot.readBinary();
_val413 = iprot.readBinary();
struct.attributes.put(_key412, _val413);
@@ -36752,7 +36926,7 @@ public class Hbase {
for (int _i415 = 0; _i415 < _map414.size; ++_i415)
{
ByteBuffer _key416; // required
- ByteBuffer _val417; // required
+ ByteBuffer _val417; // optional
_key416 = iprot.readBinary();
_val417 = iprot.readBinary();
struct.attributes.put(_key416, _val417);
@@ -36867,7 +37041,7 @@ public class Hbase {
for (int _i421 = 0; _i421 < _map420.size; ++_i421)
{
ByteBuffer _key422; // required
- ByteBuffer _val423; // required
+ ByteBuffer _val423; // optional
_key422 = iprot.readBinary();
_val423 = iprot.readBinary();
struct.attributes.put(_key422, _val423);
@@ -37235,55 +37409,28 @@ public class Hbase {
}
- public static class deleteAllRowTs_args implements org.apache.thrift.TBase<deleteAllRowTs_args, deleteAllRowTs_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("deleteAllRowTs_args");
+ public static class increment_args implements org.apache.thrift.TBase<increment_args, increment_args._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("increment_args");
- private static final org.apache.thrift.protocol.TField TABLE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tableName", org.apache.thrift.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)2);
- private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3);
- private static final org.apache.thrift.protocol.TField ATTRIBUTES_FIELD_DESC = new org.apache.thrift.protocol.TField("attributes", org.apache.thrift.protocol.TType.MAP, (short)4);
+ private static final org.apache.thrift.protocol.TField INCREMENT_FIELD_DESC = new org.apache.thrift.protocol.TField("increment", org.apache.thrift.protocol.TType.STRUCT, (short)1);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
- schemes.put(StandardScheme.class, new deleteAllRowTs_argsStandardSchemeFactory());
- schemes.put(TupleScheme.class, new deleteAllRowTs_argsTupleSchemeFactory());
+ schemes.put(StandardScheme.class, new increment_argsStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new increment_argsTupleSchemeFactory());
}
/**
- * name of table
- */
- public ByteBuffer tableName; // required
- /**
- * key of the row to be completely deleted.
- */
- public ByteBuffer row; // required
- /**
- * timestamp
- */
- public long timestamp; // required
- /**
- * Delete attributes
+ * The single increment to apply
*/
- public Map<ByteBuffer,ByteBuffer> attributes; // required
+ public TIncrement increment; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
/**
- * name of table
- */
- TABLE_NAME((short)1, "tableName"),
- /**
- * key of the row to be completely deleted.
- */
- ROW((short)2, "row"),
- /**
- * timestamp
- */
- TIMESTAMP((short)3, "timestamp"),
- /**
- * Delete attributes
+ * The single increment to apply
*/
- ATTRIBUTES((short)4, "attributes");
+ INCREMENT((short)1, "increment");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
@@ -37298,14 +37445,8 @@ public class Hbase {
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
- case 1: // TABLE_NAME
- return TABLE_NAME;
- case 2: // ROW
- return ROW;
- case 3: // TIMESTAMP
- return TIMESTAMP;
- case 4: // ATTRIBUTES
- return ATTRIBUTES;
+ case 1: // INCREMENT
+ return INCREMENT;
default:
return null;
}
@@ -37346,266 +37487,80 @@ public class Hbase {
}
// isset id assignments
- private static final int __TIMESTAMP_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("tableName", org.apache.thrift.TFieldRequirementType.DEFAULT,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , "Text")));
- tmpMap.put(_Fields.ROW, new org.apache.thrift.meta_data.FieldMetaData("row", org.apache.thrift.TFieldRequirementType.DEFAULT,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , "Text")));
- tmpMap.put(_Fields.TIMESTAMP, new org.apache.thrift.meta_data.FieldMetaData("timestamp", org.apache.thrift.TFieldRequirementType.DEFAULT,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
- tmpMap.put(_Fields.ATTRIBUTES, new org.apache.thrift.meta_data.FieldMetaData("attributes", org.apache.thrift.TFieldRequirementType.DEFAULT,
- new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , "Text"),
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , "Text"))));
+ tmpMap.put(_Fields.INCREMENT, new org.apache.thrift.meta_data.FieldMetaData("increment", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TIncrement.class)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(deleteAllRowTs_args.class, metaDataMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(increment_args.class, metaDataMap);
}
- public deleteAllRowTs_args() {
+ public increment_args() {
}
- public deleteAllRowTs_args(
- ByteBuffer tableName,
- ByteBuffer row,
- long timestamp,
- Map<ByteBuffer,ByteBuffer> attributes)
+ public increment_args(
+ TIncrement increment)
{
this();
- this.tableName = tableName;
- this.row = row;
- this.timestamp = timestamp;
- setTimestampIsSet(true);
- this.attributes = attributes;
+ this.increment = increment;
}
/**
* Performs a deep copy on <i>other</i>.
*/
- public deleteAllRowTs_args(deleteAllRowTs_args other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.isSetTableName()) {
- this.tableName = other.tableName;
- }
- if (other.isSetRow()) {
- this.row = other.row;
- }
- this.timestamp = other.timestamp;
- if (other.isSetAttributes()) {
- Map<ByteBuffer,ByteBuffer> __this__attributes = new HashMap<ByteBuffer,ByteBuffer>();
- for (Map.Entry<ByteBuffer, ByteBuffer> other_element : other.attributes.entrySet()) {
-
- ByteBuffer other_element_key = other_element.getKey();
- ByteBuffer other_element_value = other_element.getValue();
-
- ByteBuffer __this__attributes_copy_key = other_element_key;
-
- ByteBuffer __this__attributes_copy_value = other_element_value;
-
- __this__attributes.put(__this__attributes_copy_key, __this__attributes_copy_value);
- }
- this.attributes = __this__attributes;
+ public increment_args(increment_args other) {
+ if (other.isSetIncrement()) {
+ this.increment = new TIncrement(other.increment);
}
}
- public deleteAllRowTs_args deepCopy() {
- return new deleteAllRowTs_args(this);
+ public increment_args deepCopy() {
+ return new increment_args(this);
}
@Override
public void clear() {
- this.tableName = null;
- this.row = null;
- setTimestampIsSet(false);
- this.timestamp = 0;
- this.attributes = null;
- }
-
- /**
- * name of table
- */
- public byte[] getTableName() {
- setTableName(org.apache.thrift.TBaseHelper.rightSize(tableName));
- return tableName == null ? null : tableName.array();
- }
-
- public ByteBuffer bufferForTableName() {
- return tableName;
- }
-
- /**
- * name of table
- */
- public deleteAllRowTs_args setTableName(byte[] tableName) {
- setTableName(tableName == null ? (ByteBuffer)null : ByteBuffer.wrap(tableName));
- return this;
- }
-
- public deleteAllRowTs_args setTableName(ByteBuffer tableName) {
- this.tableName = tableName;
- return this;
- }
-
- public void unsetTableName() {
- this.tableName = null;
- }
-
- /** Returns true if field tableName is set (has been assigned a value) and false otherwise */
- public boolean isSetTableName() {
- return this.tableName != null;
- }
-
- public void setTableNameIsSet(boolean value) {
- if (!value) {
- this.tableName = null;
- }
- }
-
- /**
- * key of the row to be completely deleted.
- */
- public byte[] getRow() {
- setRow(org.apache.thrift.TBaseHelper.rightSize(row));
- return row == null ? null : row.array();
- }
-
- public ByteBuffer bufferForRow() {
- return row;
- }
-
- /**
- * key of the row to be completely deleted.
- */
- public deleteAllRowTs_args setRow(byte[] row) {
- setRow(row == null ? (ByteBuffer)null : ByteBuffer.wrap(row));
- return this;
- }
-
- public deleteAllRowTs_args setRow(ByteBuffer row) {
- this.row = row;
- return this;
- }
-
- public void unsetRow() {
- this.row = null;
- }
-
- /** Returns true if field row is set (has been assigned a value) and false otherwise */
- public boolean isSetRow() {
- return this.row != null;
- }
-
- public void setRowIsSet(boolean value) {
- if (!value) {
- this.row = null;
- }
- }
-
- /**
- * timestamp
- */
- public long getTimestamp() {
- return this.timestamp;
- }
-
- /**
- * timestamp
- */
- public deleteAllRowTs_args setTimestamp(long timestamp) {
- this.timestamp = timestamp;
- setTimestampIsSet(true);
- return this;
- }
-
- public void unsetTimestamp() {
- __isset_bit_vector.clear(__TIMESTAMP_ISSET_ID);
- }
-
- /** Returns true if field timestamp is set (has been assigned a value) and false otherwise */
- public boolean isSetTimestamp() {
- return __isset_bit_vector.get(__TIMESTAMP_ISSET_ID);
- }
-
- public void setTimestampIsSet(boolean value) {
- __isset_bit_vector.set(__TIMESTAMP_ISSET_ID, value);
- }
-
- public int getAttributesSize() {
- return (this.attributes == null) ? 0 : this.attributes.size();
- }
-
- public void putToAttributes(ByteBuffer key, ByteBuffer val) {
- if (this.attributes == null) {
- this.attributes = new HashMap<ByteBuffer,ByteBuffer>();
- }
- this.attributes.put(key, val);
+ this.increment = null;
}
/**
- * Delete attributes
+ * The single increment to apply
*/
- public Map<ByteBuffer,ByteBuffer> getAttributes() {
- return this.attributes;
+ public TIncrement getIncrement() {
+ return this.increment;
}
/**
- * Delete attributes
+ * The single increment to apply
*/
- public deleteAllRowTs_args setAttributes(Map<ByteBuffer,ByteBuffer> attributes) {
- this.attributes = attributes;
+ public increment_args setIncrement(TIncrement increment) {
+ this.increment = increment;
return this;
}
- public void unsetAttributes() {
- this.attributes = null;
+ public void unsetIncrement() {
+ this.increment = null;
}
- /** Returns true if field attributes is set (has been assigned a value) and false otherwise */
- public boolean isSetAttributes() {
- return this.attributes != null;
+ /** Returns true if field increment is set (has been assigned a value) and false otherwise */
+ public boolean isSetIncrement() {
+ return this.increment != null;
}
- public void setAttributesIsSet(boolean value) {
+ public void setIncrementIsSet(boolean value) {
if (!value) {
- this.attributes = null;
+ this.increment = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
- case TABLE_NAME:
- if (value == null) {
- unsetTableName();
- } else {
- setTableName((ByteBuffer)value);
- }
- break;
-
- case ROW:
- if (value == null) {
- unsetRow();
- } else {
- setRow((ByteBuffer)value);
- }
- break;
-
- case TIMESTAMP:
- if (value == null) {
- unsetTimestamp();
- } else {
- setTimestamp((Long)value);
- }
- break;
-
- case ATTRIBUTES:
+ case INCREMENT:
if (value == null) {
- unsetAttributes();
+ unsetIncrement();
} else {
- setAttributes((Map<ByteBuffer,ByteBuffer>)value);
+ setIncrement((TIncrement)value);
}
break;
@@ -37614,17 +37569,8 @@ public class Hbase {
public Object getFieldValue(_Fields field) {
switch (field) {
- case TABLE_NAME:
- return getTableName();
-
- case ROW:
- return getRow();
-
- case TIMESTAMP:
- return Long.valueOf(getTimestamp());
-
- case ATTRIBUTES:
- return getAttributes();
+ case INCREMENT:
+ return getIncrement();
}
throw new IllegalStateException();
@@ -37637,14 +37583,8 @@ public class Hbase {
}
switch (field) {
- case TABLE_NAME:
- return isSetTableName();
- case ROW:
- return isSetRow();
- case TIMESTAMP:
- return isSetTimestamp();
- case ATTRIBUTES:
- return isSetAttributes();
+ case INCREMENT:
+ return isSetIncrement();
}
throw new IllegalStateException();
}
@@ -37653,48 +37593,21 @@ public class Hbase {
public boolean equals(Object that) {
if (that == null)
return false;
- if (that instanceof deleteAllRowTs_args)
- return this.equals((deleteAllRowTs_args)that);
+ if (that instanceof increment_args)
+ return this.equals((increment_args)that);
return false;
}
- public boolean equals(deleteAllRowTs_args that) {
+ public boolean equals(increment_args that) {
if (that == null)
return false;
- boolean this_present_tableName = true && this.isSetTableName();
- boolean that_present_tableName = true && that.isSetTableName();
- if (this_present_tableName || that_present_tableName) {
- if (!(this_present_tableName && that_present_tableName))
- return false;
- if (!this.tableName.equals(that.tableName))
- return false;
- }
-
- boolean this_present_row = true && this.isSetRow();
- boolean that_present_row = true && that.isSetRow();
- if (this_present_row || that_present_row) {
- if (!(this_present_row && that_present_row))
- return false;
- if (!this.row.equals(that.row))
- return false;
- }
-
- boolean this_present_timestamp = true;
- boolean that_present_timestamp = true;
- if (this_present_timestamp || that_present_timestamp) {
- if (!(this_present_timestamp && that_present_timestamp))
- return false;
- if (this.timestamp != that.timestamp)
- return false;
- }
-
- boolean this_present_attributes = true && this.isSetAttributes();
- boolean that_present_attributes = true && that.isSetAttributes();
- if (this_present_attributes || that_present_attributes) {
- if (!(this_present_attributes && that_present_attributes))
+ boolean this_present_increment = true && this.isSetIncrement();
+ boolean that_present_increment = true && that.isSetIncrement();
+ if (this_present_increment || that_present_increment) {
+ if (!(this_present_increment && that_present_increment))
return false;
- if (!this.attributes.equals(that.attributes))
+ if (!this.increment.equals(that.increment))
return false;
}
@@ -37706,50 +37619,20 @@ public class Hbase {
return 0;
}
- public int compareTo(deleteAllRowTs_args other) {
+ public int compareTo(increment_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
- deleteAllRowTs_args typedOther = (deleteAllRowTs_args)other;
+ increment_args typedOther = (increment_args)other;
- lastComparison = Boolean.valueOf(isSetTableName()).compareTo(typedOther.isSetTableName());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (isSetTableName()) {
- lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tableName, typedOther.tableName);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(isSetRow()).compareTo(typedOther.isSetRow());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (isSetRow()) {
- lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.row, typedOther.row);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(isSetTimestamp()).compareTo(typedOther.isSetTimestamp());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (isSetTimestamp()) {
- lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.timestamp, typedOther.timestamp);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(isSetAttributes()).compareTo(typedOther.isSetAttributes());
+ lastComparison = Boolean.valueOf(isSetIncrement()).compareTo(typedOther.isSetIncrement());
if (lastComparison != 0) {
return lastComparison;
}
- if (isSetAttributes()) {
- lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.attributes, typedOther.attributes);
+ if (isSetIncrement()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.increment, typedOther.increment);
if (lastComparison != 0) {
return lastComparison;
}
@@ -37771,34 +37654,14 @@ public class Hbase {
@Override
public String toString() {
- StringBuilder sb = new StringBuilder("deleteAllRowTs_args(");
+ StringBuilder sb = new StringBuilder("increment_args(");
boolean first = true;
- sb.append("tableName:");
- if (this.tableName == null) {
- sb.append("null");
- } else {
- sb.append(this.tableName);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("row:");
- if (this.row == null) {
- sb.append("null");
- } else {
- sb.append(this.row);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("timestamp:");
- sb.append(this.timestamp);
- first = false;
- if (!first) sb.append(", ");
- sb.append("attributes:");
- if (this.attributes == null) {
+ sb.append("increment:");
+ if (this.increment == null) {
sb.append("null");
} else {
- sb.append(this.attributes);
+ sb.append(this.increment);
}
first = false;
sb.append(")");
@@ -37819,23 +37682,21 @@ public class Hbase {
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
- private static class deleteAllRowTs_argsStandardSchemeFactory implements SchemeFactory {
- public deleteAllRowTs_argsStandardScheme getScheme() {
- return new deleteAllRowTs_argsStandardScheme();
+ private static class increment_argsStandardSchemeFactory implements SchemeFactory {
+ public increment_argsStandardScheme getScheme() {
+ return new increment_argsStandardScheme();
}
}
- private static class deleteAllRowTs_argsStandardScheme extends StandardScheme<deleteAllRowTs_args> {
+ private static class increment_argsStandardScheme extends StandardScheme<increment_args> {
- public void read(org.apache.thrift.protocol.TProtocol iprot, deleteAllRowTs_args struct) throws org.apache.thrift.TException {
+ public void read(org.apache.thrift.protocol.TProtocol iprot, increment_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
@@ -37845,43 +37706,1852 @@ public class Hbase {
break;
}
switch (schemeField.id) {
- case 1: // TABLE_NAME
- if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
- struct.tableName = iprot.readBinary();
- struct.setTableNameIsSet(true);
- } else {
- org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
- }
- break;
- case 2: // ROW
- if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
- struct.row = iprot.readBinary();
- struct.setRowIsSet(true);
- } else {
- org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
- }
- break;
- case 3: // TIMESTAMP
- if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
- struct.timestamp = iprot.readI64();
- struct.setTimestampIsSet(true);
+ case 1: // INCREMENT
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.increment = new TIncrement();
+ struct.increment.read(iprot);
+ struct.setIncrementIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
- case 4: // ATTRIBUTES
- if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
- {
- org.apache.thrift.protocol.TMap _map424 = iprot.readMapBegin();
- struct.attributes = new HashMap<ByteBuffer,ByteBuffer>(2*_map424.size);
- for (int _i425 = 0; _i425 < _map424.size; ++_i425)
- {
- ByteBuffer _key426; // required
- ByteBuffer _val427; // required
- _key426 = iprot.readBinary();
- _val427 = iprot.readBinary();
- struct.attributes.put(_key426, _val427);
- }
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+
+ // check for required fields of primitive type, which can't be checked in the validate method
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, increment_args struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.increment != null) {
+ oprot.writeFieldBegin(INCREMENT_FIELD_DESC);
+ struct.increment.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class increment_argsTupleSchemeFactory implements SchemeFactory {
+ public increment_argsTupleScheme getScheme() {
+ return new increment_argsTupleScheme();
+ }
+ }
+
+ private static class increment_argsTupleScheme extends TupleScheme<increment_args> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, increment_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.isSetIncrement()) {
+ optionals.set(0);
+ }
+ oprot.writeBitSet(optionals, 1);
+ if (struct.isSetIncrement()) {
+ struct.increment.write(oprot);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, increment_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(1);
+ if (incoming.get(0)) {
+ struct.increment = new TIncrement();
+ struct.increment.read(iprot);
+ struct.setIncrementIsSet(true);
+ }
+ }
+ }
+
+ }
+
+ public static class increment_result implements org.apache.thrift.TBase<increment_result, increment_result._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("increment_result");
+
+ private static final org.apache.thrift.protocol.TField IO_FIELD_DESC = new org.apache.thrift.protocol.TField("io", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new increment_resultStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new increment_resultTupleSchemeFactory());
+ }
+
+ public IOError io; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ IO((short)1, "io");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // IO
+ return IO;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.IO, new org.apache.thrift.meta_data.FieldMetaData("io", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(increment_result.class, metaDataMap);
+ }
+
+ public increment_result() {
+ }
+
+ public increment_result(
+ IOError io)
+ {
+ this();
+ this.io = io;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public increment_result(increment_result other) {
+ if (other.isSetIo()) {
+ this.io = new IOError(other.io);
+ }
+ }
+
+ public increment_result deepCopy() {
+ return new increment_result(this);
+ }
+
+ @Override
+ public void clear() {
+ this.io = null;
+ }
+
+ public IOError getIo() {
+ return this.io;
+ }
+
+ public increment_result setIo(IOError io) {
+ this.io = io;
+ return this;
+ }
+
+ public void unsetIo() {
+ this.io = null;
+ }
+
+ /** Returns true if field io is set (has been assigned a value) and false otherwise */
+ public boolean isSetIo() {
+ return this.io != null;
+ }
+
+ public void setIoIsSet(boolean value) {
+ if (!value) {
+ this.io = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case IO:
+ if (value == null) {
+ unsetIo();
+ } else {
+ setIo((IOError)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case IO:
+ return getIo();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case IO:
+ return isSetIo();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof increment_result)
+ return this.equals((increment_result)that);
+ return false;
+ }
+
+ public boolean equals(increment_result that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_io = true && this.isSetIo();
+ boolean that_present_io = true && that.isSetIo();
+ if (this_present_io || that_present_io) {
+ if (!(this_present_io && that_present_io))
+ return false;
+ if (!this.io.equals(that.io))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return 0;
+ }
+
+ public int compareTo(increment_result other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+ increment_result typedOther = (increment_result)other;
+
+ lastComparison = Boolean.valueOf(isSetIo()).compareTo(typedOther.isSetIo());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetIo()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.io, typedOther.io);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("increment_result(");
+ boolean first = true;
+
+ sb.append("io:");
+ if (this.io == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.io);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class increment_resultStandardSchemeFactory implements SchemeFactory {
+ public increment_resultStandardScheme getScheme() {
+ return new increment_resultStandardScheme();
+ }
+ }
+
+ private static class increment_resultStandardScheme extends StandardScheme<increment_result> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, increment_result struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // IO
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.io = new IOError();
+ struct.io.read(iprot);
+ struct.setIoIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+
+ // check for required fields of primitive type, which can't be checked in the validate method
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, increment_result struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.io != null) {
+ oprot.writeFieldBegin(IO_FIELD_DESC);
+ struct.io.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class increment_resultTupleSchemeFactory implements SchemeFactory {
+ public increment_resultTupleScheme getScheme() {
+ return new increment_resultTupleScheme();
+ }
+ }
+
+ private static class increment_resultTupleScheme extends TupleScheme<increment_result> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, increment_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.isSetIo()) {
+ optionals.set(0);
+ }
+ oprot.writeBitSet(optionals, 1);
+ if (struct.isSetIo()) {
+ struct.io.write(oprot);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, increment_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(1);
+ if (incoming.get(0)) {
+ struct.io = new IOError();
+ struct.io.read(iprot);
+ struct.setIoIsSet(true);
+ }
+ }
+ }
+
+ }
+
+ public static class incrementRows_args implements org.apache.thrift.TBase<incrementRows_args, incrementRows_args._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("incrementRows_args");
+
+ private static final org.apache.thrift.protocol.TField INCREMENTS_FIELD_DESC = new org.apache.thrift.protocol.TField("increments", org.apache.thrift.protocol.TType.LIST, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new incrementRows_argsStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new incrementRows_argsTupleSchemeFactory());
+ }
+
+ /**
+ * The list of increments
+ */
+ public List<TIncrement> increments; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ /**
+ * The list of increments
+ */
+ INCREMENTS((short)1, "increments");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // INCREMENTS
+ return INCREMENTS;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.INCREMENTS, new org.apache.thrift.meta_data.FieldMetaData("increments", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TIncrement.class))));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(incrementRows_args.class, metaDataMap);
+ }
+
+ public incrementRows_args() {
+ }
+
+ public incrementRows_args(
+ List<TIncrement> increments)
+ {
+ this();
+ this.increments = increments;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public incrementRows_args(incrementRows_args other) {
+ if (other.isSetIncrements()) {
+ List<TIncrement> __this__increments = new ArrayList<TIncrement>();
+ for (TIncrement other_element : other.increments) {
+ __this__increments.add(new TIncrement(other_element));
+ }
+ this.increments = __this__increments;
+ }
+ }
+
+ public incrementRows_args deepCopy() {
+ return new incrementRows_args(this);
+ }
+
+ @Override
+ public void clear() {
+ this.increments = null;
+ }
+
+ public int getIncrementsSize() {
+ return (this.increments == null) ? 0 : this.increments.size();
+ }
+
+ public java.util.Iterator<TIncrement> getIncrementsIterator() {
+ return (this.increments == null) ? null : this.increments.iterator();
+ }
+
+ // Appends one element, lazily creating the backing list on first use.
+ public void addToIncrements(TIncrement elem) {
+ if (this.increments == null) {
+ this.increments = new ArrayList<TIncrement>();
+ }
+ this.increments.add(elem);
+ }
+
+ /**
+ * The list of increments
+ */
+ // Returns the internal list itself — callers mutating it mutate this struct.
+ public List<TIncrement> getIncrements() {
+ return this.increments;
+ }
+
+ /**
+ * The list of increments
+ */
+ // Stores the caller's list without copying; returns this for call chaining.
+ public incrementRows_args setIncrements(List<TIncrement> increments) {
+ this.increments = increments;
+ return this;
+ }
+
+ // Marks the field unset; null doubles as the "unset" sentinel for this object field.
+ public void unsetIncrements() {
+ this.increments = null;
+ }
+
+ /** Returns true if field increments is set (has been assigned a value) and false otherwise */
+ public boolean isSetIncrements() {
+ return this.increments != null;
+ }
+
+ // For object fields "set" is tracked by nullness, so only the false case does anything.
+ public void setIncrementsIsSet(boolean value) {
+ if (!value) {
+ this.increments = null;
+ }
+ }
+
+ // Generic field setter used by reflective Thrift tooling; null clears the field.
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case INCREMENTS:
+ if (value == null) {
+ unsetIncrements();
+ } else {
+ // Unchecked cast is the generated-code convention; caller must pass the declared type.
+ setIncrements((List<TIncrement>)value);
+ }
+ break;
+
+ }
+ }
+
+ // Generic field getter; throws IllegalStateException for an unrecognized field constant.
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case INCREMENTS:
+ return getIncrements();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case INCREMENTS:
+ return isSetIncrements();
+ }
+ throw new IllegalStateException();
+ }
+
+ // Type-guarded dispatch to the strongly-typed equals below; non-args objects compare unequal.
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof incrementRows_args)
+ return this.equals((incrementRows_args)that);
+ return false;
+ }
+
+ // Field-wise equality: both-unset counts as equal; set/unset mismatch is unequal.
+ public boolean equals(incrementRows_args that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_increments = true && this.isSetIncrements();
+ boolean that_present_increments = true && that.isSetIncrements();
+ if (this_present_increments || that_present_increments) {
+ if (!(this_present_increments && that_present_increments))
+ return false;
+ if (!this.increments.equals(that.increments))
+ return false;
+ }
+
+ return true;
+ }
+
+ // NOTE(review): constant 0 — presumably generated with the Thrift hashcode option disabled.
+ // It satisfies the equals/hashCode contract but degrades hash-based collections; fix belongs
+ // in the generator options / IDL, not in this generated file.
+ @Override
+ public int hashCode() {
+ return 0;
+ }
+
+ // Orders first by set-ness of each field, then by field value; classes differing in
+ // runtime type fall back to class-name order. Throws NPE on null per Comparable contract.
+ public int compareTo(incrementRows_args other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+ incrementRows_args typedOther = (incrementRows_args)other;
+
+ lastComparison = Boolean.valueOf(isSetIncrements()).compareTo(typedOther.isSetIncrements());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetIncrements()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.increments, typedOther.increments);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ // Maps a wire field id to its _Fields constant; null for unknown ids.
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ // Deserializes this struct using the scheme matching the protocol (standard vs. tuple).
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ // Serializes this struct using the scheme matching the protocol (standard vs. tuple).
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ // Debug representation: "incrementRows_args(increments:...)" with "null" for an unset field.
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("incrementRows_args(");
+ boolean first = true;
+
+ sb.append("increments:");
+ if (this.increments == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.increments);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ // No required fields declared in the IDL, so there is nothing to validate.
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ }
+
+ // Java serialization hook: delegates to Thrift compact-protocol encoding, wrapping
+ // TException in IOException as required by the writeObject contract.
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ // Java deserialization hook: mirror of writeObject using the compact protocol.
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ // Factory registered in the schemes map; supplies the standard (field-tagged) codec.
+ private static class incrementRows_argsStandardSchemeFactory implements SchemeFactory {
+ public incrementRows_argsStandardScheme getScheme() {
+ return new incrementRows_argsStandardScheme();
+ }
+ }
+
+ // Standard-protocol codec (thrift-generated): field-tagged wire format that tolerates
+ // unknown/mistyped fields by skipping them, for forward/backward compatibility.
+ private static class incrementRows_argsStandardScheme extends StandardScheme<incrementRows_args> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, incrementRows_args struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ // Consume fields in whatever order the peer wrote them, until the STOP marker.
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // INCREMENTS
+ if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+ {
+ org.apache.thrift.protocol.TList _list424 = iprot.readListBegin();
+ struct.increments = new ArrayList<TIncrement>(_list424.size);
+ for (int _i425 = 0; _i425 < _list424.size; ++_i425)
+ {
+ TIncrement _elem426; // optional
+ _elem426 = new TIncrement();
+ _elem426.read(iprot);
+ struct.increments.add(_elem426);
+ }
+ iprot.readListEnd();
+ }
+ struct.setIncrementsIsSet(true);
+ } else {
+ // Wrong wire type for field 1: skip rather than fail.
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ // Unknown field id: skip for compatibility with newer IDL revisions.
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+
+ // check for required fields of primitive type, which can't be checked in the validate method
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, incrementRows_args struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ // An unset (null) field is simply omitted from the wire.
+ if (struct.increments != null) {
+ oprot.writeFieldBegin(INCREMENTS_FIELD_DESC);
+ {
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.increments.size()));
+ for (TIncrement _iter427 : struct.increments)
+ {
+ _iter427.write(oprot);
+ }
+ oprot.writeListEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ // Factory registered in the schemes map; supplies the compact tuple codec.
+ private static class incrementRows_argsTupleSchemeFactory implements SchemeFactory {
+ public incrementRows_argsTupleScheme getScheme() {
+ return new incrementRows_argsTupleScheme();
+ }
+ }
+
+ // Tuple-protocol codec (thrift-generated): a leading BitSet records which optional
+ // fields are present, then present fields are written positionally without tags.
+ private static class incrementRows_argsTupleScheme extends TupleScheme<incrementRows_args> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, incrementRows_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.isSetIncrements()) {
+ optionals.set(0);
+ }
+ // One presence bit for the single field of this struct.
+ oprot.writeBitSet(optionals, 1);
+ if (struct.isSetIncrements()) {
+ {
+ // List length followed by the elements; no per-field tags in tuple encoding.
+ oprot.writeI32(struct.increments.size());
+ for (TIncrement _iter428 : struct.increments)
+ {
+ _iter428.write(oprot);
+ }
+ }
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, incrementRows_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(1);
+ if (incoming.get(0)) {
+ {
+ org.apache.thrift.protocol.TList _list429 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.increments = new ArrayList<TIncrement>(_list429.size);
+ for (int _i430 = 0; _i430 < _list429.size; ++_i430)
+ {
+ TIncrement _elem431; // optional
+ _elem431 = new TIncrement();
+ _elem431.read(iprot);
+ struct.increments.add(_elem431);
+ }
+ }
+ struct.setIncrementsIsSet(true);
+ }
+ }
+ }
+
+ }
+
+ public static class incrementRows_result implements org.apache.thrift.TBase<incrementRows_result, incrementRows_result._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("incrementRows_result");
+
+ private static final org.apache.thrift.protocol.TField IO_FIELD_DESC = new org.apache.thrift.protocol.TField("io", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new incrementRows_resultStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new incrementRows_resultTupleSchemeFactory());
+ }
+
+ public IOError io; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ // Thrift-generated enum for incrementRows_result: one constant ("io", id 1) plus
+ // lookup helpers by id and by name.
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ IO((short)1, "io");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // IO
+ return IO;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ // Immutable field metadata for the result struct's single "io" exception field.
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.IO, new org.apache.thrift.meta_data.FieldMetaData("io", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(incrementRows_result.class, metaDataMap);
+ }
+
+ // No-arg constructor required by Thrift deserialization; io starts unset (null = success).
+ public incrementRows_result() {
+ }
+
+ // Convenience constructor for a result carrying an IOError exception.
+ public incrementRows_result(
+ IOError io)
+ {
+ this();
+ this.io = io;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public incrementRows_result(incrementRows_result other) {
+ if (other.isSetIo()) {
+ // IOError's copy constructor clones the nested exception payload.
+ this.io = new IOError(other.io);
+ }
+ }
+
+ // Delegates to the deep-copy constructor above.
+ public incrementRows_result deepCopy() {
+ return new incrementRows_result(this);
+ }
+
+ // Resets the struct to its unset state so the instance can be reused across calls.
+ @Override
+ public void clear() {
+ this.io = null;
+ }
+
+ // The IOError raised by the server, or null if the call succeeded.
+ public IOError getIo() {
+ return this.io;
+ }
+
+ // Stores the exception field; returns this for call chaining.
+ public incrementRows_result setIo(IOError io) {
+ this.io = io;
+ return this;
+ }
+
+ // Marks the field unset; null doubles as the "unset" sentinel for this object field.
+ public void unsetIo() {
+ this.io = null;
+ }
+
+ /** Returns true if field io is set (has been assigned a value) and false otherwise */
+ public boolean isSetIo() {
+ return this.io != null;
+ }
+
+ // For object fields "set" is tracked by nullness, so only the false case does anything.
+ public void setIoIsSet(boolean value) {
+ if (!value) {
+ this.io = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case IO:
+ if (value == null) {
[... 1972 lines stripped ...]