You are viewing a plain text version of this content; the original hyperlink to the canonical version was stripped in conversion — see the Commit URL below for the canonical reference.
Posted to commits@hbase.apache.org by ap...@apache.org on 2014/08/30 02:00:44 UTC
[2/3] git commit: HBASE-11857 Restore
ReaderBase.initAfterCompression() and WALCellCodec.create(Configuration,
CompressionContext) (Ted Yu)
HBASE-11857 Restore ReaderBase.initAfterCompression() and WALCellCodec.create(Configuration, CompressionContext) (Ted Yu)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2668cc46
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2668cc46
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2668cc46
Branch: refs/heads/branch-1
Commit: 2668cc46721c65d9c3ae961f7f176cd215439a5a
Parents: 8362e62
Author: Andrew Purtell <ap...@apache.org>
Authored: Fri Aug 29 16:53:19 2014 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Fri Aug 29 16:53:19 2014 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/codec/Codec.java | 3 ++-
.../regionserver/wal/ProtobufLogReader.java | 8 +++++++-
.../hbase/regionserver/wal/ReaderBase.java | 8 +++++++-
.../regionserver/wal/SequenceFileLogReader.java | 8 +++++++-
.../hbase/regionserver/wal/WALCellCodec.java | 21 +++++++++++++++++++-
5 files changed, 43 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/2668cc46/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
index d52ef11..1413b12 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
@@ -22,6 +22,7 @@ import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.io.CellOutputStream;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
@@ -32,7 +33,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
* and without presuming an hfile context. Intent is an Interface that will work for hfile and
* rpc.
*/
-@InterfaceAudience.Private
+@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public interface Codec {
// TODO: interfacing with {@link DataBlockEncoder}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/2668cc46/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index e8a6898..3f2e6d6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.codec.Codec;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.io.LimitInputStream;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder;
@@ -54,7 +55,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
* which is appended at the end of the WAL. This is empty for now; it can contain some meta
* information such as Region level stats, etc in future.
*/
-@InterfaceAudience.Private
+@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public class ProtobufLogReader extends ReaderBase {
private static final Log LOG = LogFactory.getLog(ProtobufLogReader.class);
static final byte[] PB_WAL_MAGIC = Bytes.toBytes("PWAL");
@@ -243,6 +244,11 @@ public class ProtobufLogReader extends ReaderBase {
}
@Override
+ protected void initAfterCompression() throws IOException {
+ initAfterCompression(null);
+ }
+
+ @Override
protected void initAfterCompression(String cellCodecClsName) throws IOException {
WALCellCodec codec = getCodec(this.conf, cellCodecClsName, this.compressionContext);
this.cellDecoder = codec.getDecoder(this.inputStream);
http://git-wip-us.apache.org/repos/asf/hbase/blob/2668cc46/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
index 79dca1c..47378c8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java
@@ -28,12 +28,13 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer;
import org.apache.hadoop.hbase.util.FSUtils;
-@InterfaceAudience.Private
+@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public abstract class ReaderBase implements HLog.Reader {
private static final Log LOG = LogFactory.getLog(ReaderBase.class);
protected Configuration conf;
@@ -141,6 +142,11 @@ public abstract class ReaderBase implements HLog.Reader {
/**
* Initializes the compression after the shared stuff has been initialized. Called once.
+ */
+ protected abstract void initAfterCompression() throws IOException;
+
+ /**
+ * Initializes the compression after the shared stuff has been initialized. Called once.
* @param cellCodecClsName class name of cell Codec
*/
protected abstract void initAfterCompression(String cellCodecClsName) throws IOException;
http://git-wip-us.apache.org/repos/asf/hbase/blob/2668cc46/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
index 66e0c17..8b8a19b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
@@ -32,12 +32,13 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Metadata;
import org.apache.hadoop.io.Text;
-@InterfaceAudience.Private
+@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public class SequenceFileLogReader extends ReaderBase {
private static final Log LOG = LogFactory.getLog(SequenceFileLogReader.class);
@@ -190,6 +191,11 @@ public class SequenceFileLogReader extends ReaderBase {
}
@Override
+ protected void initAfterCompression() throws IOException {
+ // Nothing to do here
+ }
+
+ @Override
protected boolean hasCompression() {
return isWALCompressionEnabled(reader.getMetadata());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/2668cc46/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
index a5a016e..93c33ad 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
@@ -25,6 +25,7 @@ import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.codec.BaseDecoder;
import org.apache.hadoop.hbase.codec.BaseEncoder;
@@ -46,7 +47,7 @@ import com.google.protobuf.ByteString;
* This codec is used at server side for writing cells to WAL as well as for sending edits
* as part of the distributed splitting process.
*/
-@InterfaceAudience.Private
+@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public class WALCellCodec implements Codec {
/** Configuration key for the class to use when encoding cells in the WAL */
public static final String WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
@@ -102,6 +103,24 @@ public class WALCellCodec implements Codec {
{ Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
}
+ /**
+ * Create and setup a {@link WALCellCodec} from the
+ * CompressionContext.
+ * Cell Codec classname is read from {@link Configuration}.
+ * Fully prepares the codec for use.
+ * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
+ * uses a {@link WALCellCodec}.
+ * @param compression compression the codec should use
+ * @return a {@link WALCellCodec} ready for use.
+ * @throws UnsupportedOperationException if the codec cannot be instantiated
+ */
+ public static WALCellCodec create(Configuration conf,
+ CompressionContext compression) throws UnsupportedOperationException {
+ String cellCodecClsName = getWALCellCodecClass(conf);
+ return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[]
+ { Configuration.class, CompressionContext.class }, new Object[] { conf, compression });
+ }
+
public interface ByteStringCompressor {
ByteString compress(byte[] data, Dictionary dict) throws IOException;
}