You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2013/08/28 23:05:53 UTC
svn commit: r1518383 [3/3] - in /hbase/branches/0.95:
hbase-client/src/main/java/org/apache/hadoop/hbase/client/
hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/
hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/
hbase-client/src/test/...
Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java Wed Aug 28 21:05:52 2013
@@ -730,31 +730,31 @@ public final class RPCProtos {
com.google.protobuf.ByteString
getServiceNameBytes();
- // optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];
+ // optional string cell_block_codec_class = 3;
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
boolean hasCellBlockCodecClass();
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
java.lang.String getCellBlockCodecClass();
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
com.google.protobuf.ByteString
@@ -766,7 +766,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
boolean hasCellBlockCompressorClass();
@@ -775,7 +775,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
java.lang.String getCellBlockCompressorClass();
@@ -784,7 +784,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
com.google.protobuf.ByteString
@@ -978,26 +978,26 @@ public final class RPCProtos {
}
}
- // optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];
+ // optional string cell_block_codec_class = 3;
public static final int CELL_BLOCK_CODEC_CLASS_FIELD_NUMBER = 3;
private java.lang.Object cellBlockCodecClass_;
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
public boolean hasCellBlockCodecClass() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
public java.lang.String getCellBlockCodecClass() {
@@ -1015,11 +1015,11 @@ public final class RPCProtos {
}
}
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
public com.google.protobuf.ByteString
@@ -1044,7 +1044,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
public boolean hasCellBlockCompressorClass() {
@@ -1055,7 +1055,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
public java.lang.String getCellBlockCompressorClass() {
@@ -1077,7 +1077,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
public com.google.protobuf.ByteString
@@ -1097,7 +1097,7 @@ public final class RPCProtos {
private void initFields() {
userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
serviceName_ = "";
- cellBlockCodecClass_ = "org.apache.hadoop.hbase.codec.KeyValueCodec";
+ cellBlockCodecClass_ = "";
cellBlockCompressorClass_ = "";
}
private byte memoizedIsInitialized = -1;
@@ -1349,7 +1349,7 @@ public final class RPCProtos {
bitField0_ = (bitField0_ & ~0x00000001);
serviceName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
- cellBlockCodecClass_ = "org.apache.hadoop.hbase.codec.KeyValueCodec";
+ cellBlockCodecClass_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
cellBlockCompressorClass_ = "";
bitField0_ = (bitField0_ & ~0x00000008);
@@ -1659,25 +1659,25 @@ public final class RPCProtos {
return this;
}
- // optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];
- private java.lang.Object cellBlockCodecClass_ = "org.apache.hadoop.hbase.codec.KeyValueCodec";
+ // optional string cell_block_codec_class = 3;
+ private java.lang.Object cellBlockCodecClass_ = "";
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
public boolean hasCellBlockCodecClass() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
public java.lang.String getCellBlockCodecClass() {
@@ -1692,11 +1692,11 @@ public final class RPCProtos {
}
}
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
public com.google.protobuf.ByteString
@@ -1713,11 +1713,11 @@ public final class RPCProtos {
}
}
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
public Builder setCellBlockCodecClass(
@@ -1731,11 +1731,11 @@ public final class RPCProtos {
return this;
}
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
public Builder clearCellBlockCodecClass() {
@@ -1745,11 +1745,11 @@ public final class RPCProtos {
return this;
}
/**
- * <code>optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];</code>
+ * <code>optional string cell_block_codec_class = 3;</code>
*
* <pre>
* Cell block codec we will use sending over optional cell blocks. Server throws exception
- * if cannot deal.
+ * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
* </pre>
*/
public Builder setCellBlockCodecClassBytes(
@@ -1770,7 +1770,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
public boolean hasCellBlockCompressorClass() {
@@ -1781,7 +1781,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
public java.lang.String getCellBlockCompressorClass() {
@@ -1800,7 +1800,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
public com.google.protobuf.ByteString
@@ -1821,7 +1821,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
public Builder setCellBlockCompressorClass(
@@ -1839,7 +1839,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
public Builder clearCellBlockCompressorClass() {
@@ -1853,7 +1853,7 @@ public final class RPCProtos {
*
* <pre>
* Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- * Class must implement hadoop's CompressionCodec Interface
+ * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
* </pre>
*/
public Builder setCellBlockCompressorClassBytes(
@@ -6002,25 +6002,24 @@ public final class RPCProtos {
java.lang.String[] descriptorData = {
"\n\tRPC.proto\032\rTracing.proto\032\013hbase.proto\"" +
"<\n\017UserInformation\022\026\n\016effective_user\030\001 \002" +
- "(\t\022\021\n\treal_user\030\002 \001(\t\"\277\001\n\020ConnectionHead" +
+ "(\t\022\021\n\treal_user\030\002 \001(\t\"\222\001\n\020ConnectionHead" +
"er\022#\n\tuser_info\030\001 \001(\0132\020.UserInformation\022" +
- "\024\n\014service_name\030\002 \001(\t\022K\n\026cell_block_code" +
- "c_class\030\003 \001(\t:+org.apache.hadoop.hbase.c" +
- "odec.KeyValueCodec\022#\n\033cell_block_compres" +
- "sor_class\030\004 \001(\t\"\037\n\rCellBlockMeta\022\016\n\006leng" +
- "th\030\001 \001(\r\"|\n\021ExceptionResponse\022\034\n\024excepti" +
- "on_class_name\030\001 \001(\t\022\023\n\013stack_trace\030\002 \001(\t",
- "\022\020\n\010hostname\030\003 \001(\t\022\014\n\004port\030\004 \001(\005\022\024\n\014do_n" +
- "ot_retry\030\005 \001(\010\"\254\001\n\rRequestHeader\022\017\n\007call" +
- "_id\030\001 \001(\r\022\035\n\ntrace_info\030\002 \001(\0132\t.RPCTInfo" +
- "\022\023\n\013method_name\030\003 \001(\t\022\025\n\rrequest_param\030\004" +
- " \001(\010\022\'\n\017cell_block_meta\030\005 \001(\0132\016.CellBloc" +
- "kMeta\022\026\n\016effective_user\030\006 \001(\t\"q\n\016Respons" +
- "eHeader\022\017\n\007call_id\030\001 \001(\r\022%\n\texception\030\002 " +
- "\001(\0132\022.ExceptionResponse\022\'\n\017cell_block_me" +
- "ta\030\003 \001(\0132\016.CellBlockMetaB<\n*org.apache.h" +
- "adoop.hbase.protobuf.generatedB\tRPCProto",
- "sH\001\240\001\001"
+ "\024\n\014service_name\030\002 \001(\t\022\036\n\026cell_block_code" +
+ "c_class\030\003 \001(\t\022#\n\033cell_block_compressor_c" +
+ "lass\030\004 \001(\t\"\037\n\rCellBlockMeta\022\016\n\006length\030\001 " +
+ "\001(\r\"|\n\021ExceptionResponse\022\034\n\024exception_cl" +
+ "ass_name\030\001 \001(\t\022\023\n\013stack_trace\030\002 \001(\t\022\020\n\010h" +
+ "ostname\030\003 \001(\t\022\014\n\004port\030\004 \001(\005\022\024\n\014do_not_re",
+ "try\030\005 \001(\010\"\254\001\n\rRequestHeader\022\017\n\007call_id\030\001" +
+ " \001(\r\022\035\n\ntrace_info\030\002 \001(\0132\t.RPCTInfo\022\023\n\013m" +
+ "ethod_name\030\003 \001(\t\022\025\n\rrequest_param\030\004 \001(\010\022" +
+ "\'\n\017cell_block_meta\030\005 \001(\0132\016.CellBlockMeta" +
+ "\022\026\n\016effective_user\030\006 \001(\t\"q\n\016ResponseHead" +
+ "er\022\017\n\007call_id\030\001 \001(\r\022%\n\texception\030\002 \001(\0132\022" +
+ ".ExceptionResponse\022\'\n\017cell_block_meta\030\003 " +
+ "\001(\0132\016.CellBlockMetaB<\n*org.apache.hadoop" +
+ ".hbase.protobuf.generatedB\tRPCProtosH\001\240\001" +
+ "\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
Modified: hbase/branches/0.95/hbase-protocol/src/main/protobuf/Client.proto
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/protobuf/Client.proto?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/protobuf/Client.proto (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/protobuf/Client.proto Wed Aug 28 21:05:52 2013
@@ -263,14 +263,21 @@ message ScanRequest {
* be false. If it is not specified, it means there are more.
*/
message ScanResponse {
- optional ResultCellMeta result_cell_meta = 1;
+ // This field is filled in if we are doing cellblocks. A cellblock is made up
+ // of all Cells serialized out as one cellblock BUT responses from a server
+ // have their Cells grouped by Result. So we can reconstitute the
+ // Results on the client-side, this field is a list of counts of Cells
+ // in each Result that makes up the response. For example, if this field
+ // has 3, 3, 3 in it, then we know that on the client, we are to make
+ // three Results each of three Cells each.
+ repeated uint32 cells_per_result = 1;
optional uint64 scanner_id = 2;
optional bool more_results = 3;
optional uint32 ttl = 4;
-}
-
-message ResultCellMeta {
- repeated uint32 cells_length = 1;
+ // If cells are not carried in an accompanying cellblock, then they are pb'd here.
+ // This field is mutually exclusive with cells_per_result (since the Cells will
+ // be inside the pb'd Result)
+ repeated Result results = 5;
}
/**
Modified: hbase/branches/0.95/hbase-protocol/src/main/protobuf/RPC.proto
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/protobuf/RPC.proto?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/protobuf/RPC.proto (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/protobuf/RPC.proto Wed Aug 28 21:05:52 2013
@@ -81,10 +81,10 @@ message ConnectionHeader {
optional UserInformation user_info = 1;
optional string service_name = 2;
// Cell block codec we will use sending over optional cell blocks. Server throws exception
- // if cannot deal.
- optional string cell_block_codec_class = 3 [default = "org.apache.hadoop.hbase.codec.KeyValueCodec"];
+ // if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+ optional string cell_block_codec_class = 3;
// Compressor we will use if cell block is compressed. Server will throw exception if not supported.
- // Class must implement hadoop's CompressionCodec Interface
+ // Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec.
optional string cell_block_compressor_class = 4;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java Wed Aug 28 21:05:52 2013
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HRegionIn
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcCallContext.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcCallContext.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcCallContext.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcCallContext.java Wed Aug 28 21:05:52 2013
@@ -25,4 +25,14 @@ public interface RpcCallContext extends
* @throws CallerDisconnectedException
*/
void throwExceptionIfCallerDisconnected(String regionName) throws CallerDisconnectedException;
-}
+
+ /**
+ * If the client connected and specified a codec to use, then we will use this codec making
+ * cellblocks to return. If the client did not specify a codec, we assume it does not support
+ * cellblocks and will return all content protobuf'd (though it makes our serving slower).
+ * We need to ask this question per call because a server could be hosting both clients that
+ * support cellblocks while fielding requests from clients that do not.
+ * @return True if the client supports cellblocks, else return all content in pb
+ */
+ boolean isClientCellBlockSupport();
+}
\ No newline at end of file
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java Wed Aug 28 21:05:52 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.ipc;
import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION;
+import com.google.common.collect.Lists;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
@@ -61,7 +62,6 @@ import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
-import com.google.common.collect.Lists;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -203,8 +203,8 @@ public class RpcServer implements RpcSer
protected final Configuration conf;
+ private final int maxQueueSize;
private int maxQueueLength;
- private int maxQueueSize;
protected int socketSendBufferSize;
protected final boolean tcpNoDelay; // if T then disable Nagle's Algorithm
protected final boolean tcpKeepAlive; // if T then use keepalives
@@ -450,6 +450,11 @@ public class RpcServer implements RpcSer
}
@Override
+ public boolean isClientCellBlockSupport() {
+ return this.connection != null && this.connection.codec != null;
+ }
+
+ @Override
public void throwExceptionIfCallerDisconnected(String regionName)
throws CallerDisconnectedException {
if (!connection.channel.isOpen()) {
@@ -1568,7 +1573,9 @@ public class RpcServer implements RpcSer
private void setupCellBlockCodecs(final ConnectionHeader header)
throws FatalConnectionException {
// TODO: Plug in other supported decoders.
+ if (!header.hasCellBlockCodecClass()) return;
String className = header.getCellBlockCodecClass();
+ if (className == null || className.length() == 0) return;
try {
this.codec = (Codec)Class.forName(className).newInstance();
} catch (Exception e) {
@@ -2425,9 +2432,10 @@ public class RpcServer implements RpcSer
}
/**
- * Needed for delayed calls. We need to be able to store the current call
- * so that we can complete it later.
- * @return Call the server is currently handling.
+ * Needed for features such as delayed calls. We need to be able to store the current call
+ * so that we can complete it later or ask questions of what is supported by the current ongoing
+ * call.
+ * @return An RpcCallContext backed by the currently ongoing call (gotten from a thread local)
*/
public static RpcCallContext getCurrentCall() {
return CurCall.get();
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java Wed Aug 28 21:05:52 2013
@@ -25,12 +25,12 @@ import java.net.InetSocketAddress;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.security.authorize.PolicyProvider;
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
import com.google.common.base.Function;
+import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.BlockingService;
import com.google.protobuf.Descriptors.MethodDescriptor;
import com.google.protobuf.Message;
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Wed Aug 28 21:05:52 2013
@@ -59,7 +59,6 @@ import org.apache.hadoop.hbase.CellScann
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Chore;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -104,6 +103,7 @@ import org.apache.hadoop.hbase.fs.HFileS
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController;
+import org.apache.hadoop.hbase.ipc.RpcCallContext;
import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
@@ -163,7 +163,6 @@ import org.apache.hadoop.hbase.protobuf.
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultCellMeta;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
@@ -2947,8 +2946,8 @@ public class HRegionServer implements Cl
* @throws ServiceException
*/
@Override
- public ScanResponse scan(final RpcController controller,
- final ScanRequest request) throws ServiceException {
+ public ScanResponse scan(final RpcController controller, final ScanRequest request)
+ throws ServiceException {
Leases.Lease lease = null;
String scannerName = null;
try {
@@ -3011,7 +3010,7 @@ public class HRegionServer implements Cl
if (!isLoadingCfsOnDemandSet) {
scan.setLoadColumnFamiliesOnDemand(region.isLoadingCfsOnDemandDefault());
}
- byte[] hasMetrics = scan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_ENABLE);
+ scan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_ENABLE);
region.prepareScanner(scan);
if (region.getCoprocessorHost() != null) {
scanner = region.getCoprocessorHost().preScannerOpen(scan);
@@ -3118,16 +3117,7 @@ public class HRegionServer implements Cl
moreResults = false;
results = null;
} else {
- ResultCellMeta.Builder rcmBuilder = ResultCellMeta.newBuilder();
- List<CellScannable> cellScannables = new ArrayList<CellScannable>(results.size());
- for (Result res : results) {
- cellScannables.add(res);
- rcmBuilder.addCellsLength(res.size());
- }
- builder.setResultCellMeta(rcmBuilder.build());
- // TODO is this okey to assume the type and cast
- ((PayloadCarryingRpcController) controller).setCellScanner(CellUtil
- .createCellScanner(cellScannables));
+ formatResults(builder, results, controller);
}
} finally {
// We're done. On way out re-add the above removed lease.
@@ -3175,6 +3165,26 @@ public class HRegionServer implements Cl
}
}
+ private void formatResults(final ScanResponse.Builder builder, final List<Result> results,
+ final RpcController controller) {
+ if (results == null || results.isEmpty()) return;
+ RpcCallContext context = RpcServer.getCurrentCall();
+ if (context != null && context.isClientCellBlockSupport()) {
+ List<CellScannable> cellScannables = new ArrayList<CellScannable>(results.size());
+ for (Result res : results) {
+ cellScannables.add(res);
+ builder.addCellsPerResult(res.size());
+ }
+ ((PayloadCarryingRpcController)controller).
+ setCellScanner(CellUtil.createCellScanner(cellScannables));
+ } else {
+ for (Result res: results) {
+ ClientProtos.Result pbr = ProtobufUtil.toResult(res);
+ builder.addResults(pbr);
+ }
+ }
+ }
+
/**
* Atomically bulk load several HFiles into an open region
* @return true if successful, false is failed but recoverably (no action)
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCheckTestClasses.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCheckTestClasses.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCheckTestClasses.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCheckTestClasses.java Wed Aug 28 21:05:52 2013
@@ -18,23 +18,12 @@
package org.apache.hadoop.hbase;
-import static junit.framework.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.IOException;
-import java.lang.reflect.Method;
-import java.lang.reflect.Modifier;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Enumeration;
import java.util.List;
-import java.util.regex.Pattern;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import org.junit.runners.Suite;
/**
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java Wed Aug 28 21:05:52 2013
@@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.client.HC
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultCellMeta;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
import org.apache.hadoop.hbase.util.Bytes;
@@ -167,11 +166,9 @@ public class TestMetaReaderEditorNoClust
final List<CellScannable> cellScannables = new ArrayList<CellScannable>(1);
cellScannables.add(new Result(kvs));
final ScanResponse.Builder builder = ScanResponse.newBuilder();
- ResultCellMeta.Builder metaBuilder = ResultCellMeta.newBuilder();
for (CellScannable result : cellScannables) {
- metaBuilder.addCellsLength(((Result)result).size());
+ builder.addCellsPerResult(((Result)result).size());
}
- builder.setResultCellMeta(metaBuilder.build());
Mockito.when(implementation.scan((RpcController) Mockito.any(), (ScanRequest) Mockito.any()))
.thenThrow(new ServiceException("Server not running (1 of 3)"))
.thenThrow(new ServiceException("Server not running (2 of 3)"))
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java Wed Aug 28 21:05:52 2013
@@ -40,9 +40,6 @@ import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.concurrent.SynchronousQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.lang.ArrayUtils;
@@ -51,7 +48,6 @@ import org.apache.commons.logging.LogFac
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -63,6 +59,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
@@ -94,7 +91,6 @@ import org.apache.hadoop.hbase.regionser
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.log4j.Level;
import org.junit.After;
Added: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java?rev=1518383&view=auto
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java (added)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java Wed Aug 28 21:05:52 2013
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Do some ops and prove that client and server can work w/o codecs; that we can pb all the time.
+ * Good for third-party clients or simple scripts that want to talk direct to hbase.
+ */
+@Category(MediumTests.class)
+public class TestFromClientSideNoCodec {
+ protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+ /**
+ * @throws java.lang.Exception
+ */
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ // Turn off codec use
+ TEST_UTIL.getConfiguration().set("hbase.client.default.rpc.codec", "");
+ TEST_UTIL.startMiniCluster(1);
+ }
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @AfterClass
+ public static void tearDownAfterClass() throws Exception {
+ TEST_UTIL.shutdownMiniCluster();
+ }
+
+ @Test
+ public void testBasics() throws IOException {
+ final byte [] t = Bytes.toBytes("testBasics");
+ final byte [][] fs = new byte[][] {Bytes.toBytes("cf1"), Bytes.toBytes("cf2"),
+ Bytes.toBytes("cf3") };
+ HTable ht = TEST_UTIL.createTable(t, fs);
+ // Check put and get.
+ final byte [] row = Bytes.toBytes("row");
+ Put p = new Put(row);
+ for (byte [] f: fs) p.add(f, f, f);
+ ht.put(p);
+ Result r = ht.get(new Get(row));
+ int i = 0;
+ for (CellScanner cellScanner = r.cellScanner(); cellScanner.advance();) {
+ Cell cell = cellScanner.current();
+ byte [] f = fs[i++];
+ assertTrue(Bytes.toString(f),
+ Bytes.equals(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(),
+ f, 0, f.length));
+ }
+ // Check getRowOrBefore
+ byte [] f = fs[0];
+ r = ht.getRowOrBefore(row, f);
+ assertTrue(r.toString(), r.containsColumn(f, f));
+ // Check scan.
+ ResultScanner scanner = ht.getScanner(new Scan());
+ int count = 0;
+ while ((r = scanner.next()) != null) {
+ assertTrue(r.list().size() == 3);
+ count++;
+ }
+ assertTrue(count == 1);
+ }
+
+ @Test
+ public void testNoCodec() {
+ Configuration c = new Configuration();
+ c.set("hbase.client.default.rpc.codec", "");
+ String codec = RpcClient.getDefaultCodec(c);
+ assertTrue(codec == null || codec.length() == 0);
+ }
+}
\ No newline at end of file
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java Wed Aug 28 21:05:52 2013
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.coproces
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Collection;
@@ -41,7 +40,6 @@ import org.apache.hadoop.hbase.HRegionIn
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
-import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
@@ -64,7 +62,7 @@ import org.apache.hadoop.hbase.util.Byte
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category(MediumTests.class)
public class TestRegionObserverScannerOpenHook {
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
static final Path DIR = UTIL.getDataTestDir();
@@ -212,7 +210,6 @@ public class TestRegionObserverScannerOp
* region
*/
@Test
- @Category(MediumTests.class)
public void testRegionObserverCompactionTimeStacking() throws Exception {
// setup a mini cluster so we can do a real compaction on a region
Configuration conf = UTIL.getConfiguration();
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java Wed Aug 28 21:05:52 2013
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.KeyValueU
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto;
@@ -112,13 +113,16 @@ public class TestIPC {
// an echo, just put them back on the controller creating a new block. Tests our block
// building.
CellScanner cellScanner = pcrc.cellScanner();
- List<Cell> list = new ArrayList<Cell>();
- try {
- while(cellScanner.advance()) {
- list.add(cellScanner.current());
+ List<Cell> list = null;
+ if (cellScanner != null) {
+ list = new ArrayList<Cell>();
+ try {
+ while(cellScanner.advance()) {
+ list.add(cellScanner.current());
+ }
+ } catch (IOException e) {
+ throw new ServiceException(e);
}
- } catch (IOException e) {
- throw new ServiceException(e);
}
cellScanner = CellUtil.createCellScanner(list);
((PayloadCarryingRpcController)controller).setCellScanner(cellScanner);
@@ -148,6 +152,38 @@ public class TestIPC {
}
/**
+ * Ensure we do not HAVE TO HAVE a codec.
+ * @throws InterruptedException
+ * @throws IOException
+ */
+ @Test
+ public void testNoCodec() throws InterruptedException, IOException {
+ Configuration conf = HBaseConfiguration.create();
+ RpcClient client = new RpcClient(conf, HConstants.CLUSTER_ID_DEFAULT) {
+ @Override
+ Codec getCodec() {
+ return null;
+ }
+ };
+ TestRpcServer rpcServer = new TestRpcServer();
+ try {
+ rpcServer.start();
+ InetSocketAddress address = rpcServer.getListenerAddress();
+ MethodDescriptor md = SERVICE.getDescriptorForType().findMethodByName("echo");
+ final String message = "hello";
+ EchoRequestProto param = EchoRequestProto.newBuilder().setMessage(message).build();
+ Pair<Message, CellScanner> r = client.call(md, param, null,
+ md.getOutputType().toProto(), User.getCurrent(), address, 0);
+ assertTrue(r.getSecond() == null);
+ // Silly assertion that the message is in the returned pb.
+ assertTrue(r.getFirst().toString().contains(message));
+ } finally {
+ client.stop();
+ rpcServer.stop();
+ }
+ }
+
+ /**
* It is hard to verify the compression is actually happening under the wraps. Hope that if
* unsupported, we'll get an exception out of some time (meantime, have to trace it manually
* to confirm that compression is happening down in the client and server).
@@ -159,11 +195,14 @@ public class TestIPC {
@Test
public void testCompressCellBlock()
throws IOException, InterruptedException, SecurityException, NoSuchMethodException {
- // Currently, you set
- Configuration conf = HBaseConfiguration.create();
+ Configuration conf = new Configuration(HBaseConfiguration.create());
conf.set("hbase.client.rpc.compressor", GzipCodec.class.getCanonicalName());
+ doSimpleTest(conf, new RpcClient(conf, HConstants.CLUSTER_ID_DEFAULT));
+ }
+
+ private void doSimpleTest(final Configuration conf, final RpcClient client)
+ throws InterruptedException, IOException {
TestRpcServer rpcServer = new TestRpcServer();
- RpcClient client = new RpcClient(conf, HConstants.CLUSTER_ID_DEFAULT);
List<Cell> cells = new ArrayList<Cell>();
int count = 3;
for (int i = 0; i < count; i++) cells.add(CELL);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java Wed Aug 28 21:05:52 2013
@@ -84,7 +84,6 @@ import org.apache.hadoop.hbase.protobuf.
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultCellMeta;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
import org.apache.hadoop.hbase.regionserver.CompactionRequestor;
@@ -382,9 +381,7 @@ ClientProtos.ClientService.BlockingInter
long scannerId = request.getScannerId();
Result result = next(scannerId);
if (result != null) {
- ResultCellMeta.Builder metaBuilder = ResultCellMeta.newBuilder();
- metaBuilder.addCellsLength(result.size());
- builder.setResultCellMeta(metaBuilder.build());
+ builder.addCellsPerResult(result.size());
List<CellScannable> results = new ArrayList<CellScannable>(1);
results.add(result);
((PayloadCarryingRpcController) controller).setCellScanner(CellUtil
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java Wed Aug 28 21:05:52 2013
@@ -64,7 +64,6 @@ import org.apache.hadoop.hbase.protobuf.
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultCellMeta;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table;
@@ -606,9 +605,7 @@ public class TestAssignmentManager {
final ScanResponse.Builder builder = ScanResponse.newBuilder();
builder.setMoreResults(true);
- ResultCellMeta.Builder metaBuilder = ResultCellMeta.newBuilder();
- metaBuilder.addCellsLength(r.size());
- builder.setResultCellMeta(metaBuilder.build());
+ builder.addCellsPerResult(r.size());
final List<CellScannable> cellScannables = new ArrayList<CellScannable>(1);
cellScannables.add(r);
Mockito.when(implementation.scan(
@@ -1077,9 +1074,7 @@ public class TestAssignmentManager {
Result r = MetaMockingUtil.getMetaTableRowResult(REGIONINFO, SERVERNAME_A);
final ScanResponse.Builder builder = ScanResponse.newBuilder();
builder.setMoreResults(true);
- ResultCellMeta.Builder metaBuilder = ResultCellMeta.newBuilder();
- metaBuilder.addCellsLength(r.size());
- builder.setResultCellMeta(metaBuilder.build());
+ builder.addCellsPerResult(r.size());
final List<CellScannable> rows = new ArrayList<CellScannable>(1);
rows.add(r);
Answer<ScanResponse> ans = new Answer<ClientProtos.ScanResponse>() {
Modified: hbase/branches/0.95/src/main/docbkx/rpc.xml
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/src/main/docbkx/rpc.xml?rev=1518383&r1=1518382&r2=1518383&view=diff
==============================================================================
Binary files - no diff available.