You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by jm...@apache.org on 2013/08/30 22:31:48 UTC
svn commit: r1519076 [3/6] - in /hbase/branches/0.95:
hbase-client/src/main/java/org/apache/hadoop/hbase/
hbase-client/src/main/java/org/apache/hadoop/hbase/client/
hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/
hbase-client/src...
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java Fri Aug 30 20:31:44 2013
@@ -25,6 +25,8 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
@@ -107,12 +109,12 @@ extends TableMapper<ImmutableBytesWritab
ArrayList<byte[]> foundList = new ArrayList<byte[]>();
int numCols = columns.length;
if (numCols > 0) {
- for (KeyValue value: r.list()) {
- byte [] column = KeyValue.makeColumn(value.getFamily(),
- value.getQualifier());
+ for (Cell value: r.list()) {
+ byte [] column = KeyValue.makeColumn(CellUtil.getFamilyArray(value),
+ CellUtil.getQualifierArray(value));
for (int i = 0; i < numCols; i++) {
if (Bytes.equals(column, columns[i])) {
- foundList.add(value.getValue());
+ foundList.add(CellUtil.getValueArray(value));
break;
}
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java Fri Aug 30 20:31:44 2013
@@ -34,8 +34,11 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
@@ -90,12 +93,12 @@ public class Import {
Context context)
throws IOException {
try {
- for (KeyValue kv : value.raw()) {
+ for (Cell kv : value.raw()) {
kv = filterKv(kv);
// skip if we filtered it out
if (kv == null) continue;
-
- context.write(row, convertKv(kv, cfRenameMap));
+ // TODO get rid of ensureKeyValue
+ context.write(row, KeyValueUtil.ensureKeyValue(convertKv(kv, cfRenameMap)));
}
} catch (InterruptedException e) {
e.printStackTrace();
@@ -140,14 +143,14 @@ public class Import {
throws IOException, InterruptedException {
Put put = null;
Delete delete = null;
- for (KeyValue kv : result.raw()) {
+ for (Cell kv : result.raw()) {
kv = filterKv(kv);
// skip if we filter it out
if (kv == null) continue;
kv = convertKv(kv, cfRenameMap);
// Deletes and Puts are gathered and written when finished
- if (kv.isDelete()) {
+ if (CellUtil.isDelete(kv)) {
if (delete == null) {
delete = new Delete(key.get());
}
@@ -245,7 +248,7 @@ public class Import {
* @return <tt>null</tt> if the key should not be written, otherwise returns the original
* {@link KeyValue}
*/
- private static KeyValue filterKv(KeyValue kv) throws IOException {
+ private static Cell filterKv(Cell kv) throws IOException {
// apply the filter and skip this kv if the filter doesn't apply
if (filter != null) {
Filter.ReturnCode code = filter.filterKeyValue(kv);
@@ -261,23 +264,23 @@ public class Import {
}
// helper: create a new KeyValue based on CF rename map
- private static KeyValue convertKv(KeyValue kv, Map<byte[], byte[]> cfRenameMap) {
+ private static Cell convertKv(Cell kv, Map<byte[], byte[]> cfRenameMap) {
if(cfRenameMap != null) {
// If there's a rename mapping for this CF, create a new KeyValue
- byte[] newCfName = cfRenameMap.get(kv.getFamily());
+ byte[] newCfName = cfRenameMap.get(CellUtil.getFamilyArray(kv));
if(newCfName != null) {
- kv = new KeyValue(kv.getBuffer(), // row buffer
+ kv = new KeyValue(kv.getRowArray(), // row buffer
kv.getRowOffset(), // row offset
kv.getRowLength(), // row length
newCfName, // CF buffer
0, // CF offset
newCfName.length, // CF length
- kv.getBuffer(), // qualifier buffer
+ kv.getQualifierArray(), // qualifier buffer
kv.getQualifierOffset(), // qualifier offset
kv.getQualifierLength(), // qualifier length
kv.getTimestamp(), // timestamp
- KeyValue.Type.codeToType(kv.getType()), // KV Type
- kv.getBuffer(), // value buffer
+ KeyValue.Type.codeToType(kv.getTypeByte()), // KV Type
+ kv.getValueArray(), // value buffer
kv.getValueOffset(), // value offset
kv.getValueLength()); // value length
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java Fri Aug 30 20:31:44 2013
@@ -27,6 +27,7 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -94,11 +95,11 @@ public class TableNamespaceManager {
ResultScanner scanner = table.getScanner(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES);
try {
for(Result result : scanner) {
+ byte[] val = CellUtil.getValueArray(result.getColumnLatest(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES,
+ HTableDescriptor.NAMESPACE_COL_DESC_BYTES));
NamespaceDescriptor ns =
ProtobufUtil.toNamespaceDescriptor(
- HBaseProtos.NamespaceDescriptor.parseFrom(
- result.getColumnLatest(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES,
- HTableDescriptor.NAMESPACE_COL_DESC_BYTES).getValue()));
+ HBaseProtos.NamespaceDescriptor.parseFrom(val));
zkNamespaceManager.update(ns);
}
} finally {
@@ -112,11 +113,11 @@ public class TableNamespaceManager {
if (res.isEmpty()) {
return null;
}
+ byte[] val = CellUtil.getValueArray(res.getColumnLatest(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES,
+ HTableDescriptor.NAMESPACE_COL_DESC_BYTES));
return
ProtobufUtil.toNamespaceDescriptor(
- HBaseProtos.NamespaceDescriptor.parseFrom(
- res.getColumnLatest(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES,
- HTableDescriptor.NAMESPACE_COL_DESC_BYTES).getValue()));
+ HBaseProtos.NamespaceDescriptor.parseFrom(val));
}
public synchronized void create(NamespaceDescriptor ns) throws IOException {
@@ -185,10 +186,10 @@ public class TableNamespaceManager {
ResultScanner scanner = table.getScanner(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES);
try {
for(Result r : scanner) {
- ret.add(ProtobufUtil.toNamespaceDescriptor(
- HBaseProtos.NamespaceDescriptor.parseFrom(
- r.getColumnLatest(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES,
- HTableDescriptor.NAMESPACE_COL_DESC_BYTES).getValue())));
+ byte[] val = CellUtil.getValueArray(r.getColumnLatest(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES,
+ HTableDescriptor.NAMESPACE_COL_DESC_BYTES));
+ ret.add(ProtobufUtil.toNamespaceDescriptor(
+ HBaseProtos.NamespaceDescriptor.parseFrom(val)));
}
} finally {
scanner.close();
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java Fri Aug 30 20:31:44 2013
@@ -66,6 +66,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -1842,7 +1843,7 @@ public class HRegion implements HeapSize
get.setMaxVersions(count);
get.addColumn(family, qual);
- List<KeyValue> result = get(get, false);
+ List<Cell> result = get(get, false);
if (result.size() < count) {
// Nothing to delete
@@ -1852,7 +1853,7 @@ public class HRegion implements HeapSize
if (result.size() > count) {
throw new RuntimeException("Unexpected size: " + result.size());
}
- KeyValue getkv = result.get(count - 1);
+ KeyValue getkv = KeyValueUtil.ensureKeyValue(result.get(count - 1));
Bytes.putBytes(kv.getBuffer(), kv.getTimestampOffset(),
getkv.getBuffer(), getkv.getTimestampOffset(), Bytes.SIZEOF_LONG);
} else {
@@ -2372,7 +2373,7 @@ public class HRegion implements HeapSize
RowLock rowLock = getRowLock(get.getRow());
// wait for all previous transactions to complete (with lock held)
mvcc.completeMemstoreInsert(mvcc.beginMemstoreInsert());
- List<KeyValue> result = null;
+ List<Cell> result;
try {
result = get(get, false);
@@ -2381,12 +2382,12 @@ public class HRegion implements HeapSize
boolean matches = false;
if (result.size() == 0 && valueIsNull) {
matches = true;
- } else if (result.size() > 0 && result.get(0).getValue().length == 0 &&
+ } else if (result.size() > 0 && result.get(0).getValueLength() == 0 &&
valueIsNull) {
matches = true;
} else if (result.size() == 1 && !valueIsNull) {
- KeyValue kv = result.get(0);
- int compareResult = comparator.compareTo(kv.getBuffer(),
+ Cell kv = result.get(0);
+ int compareResult = comparator.compareTo(kv.getValueArray(),
kv.getValueOffset(), kv.getValueLength());
switch (compareOp) {
case LESS:
@@ -3511,14 +3512,14 @@ public class HRegion implements HeapSize
}
@Override
- public boolean next(List<KeyValue> outResults)
+ public boolean next(List<Cell> outResults)
throws IOException {
// apply the batching limit by default
return next(outResults, batch);
}
@Override
- public synchronized boolean next(List<KeyValue> outResults, int limit) throws IOException {
+ public synchronized boolean next(List<Cell> outResults, int limit) throws IOException {
if (this.filterClosed) {
throw new UnknownScannerException("Scanner was closed (timed out?) " +
"after we renewed it. Could be caused by a very slow scanner " +
@@ -3538,20 +3539,20 @@ public class HRegion implements HeapSize
}
@Override
- public boolean nextRaw(List<KeyValue> outResults)
+ public boolean nextRaw(List<Cell> outResults)
throws IOException {
return nextRaw(outResults, batch);
}
@Override
- public boolean nextRaw(List<KeyValue> outResults, int limit) throws IOException {
+ public boolean nextRaw(List<Cell> outResults, int limit) throws IOException {
boolean returnResult;
if (outResults.isEmpty()) {
// Usually outResults is empty. This is true when next is called
// to handle scan or get operation.
returnResult = nextInternal(outResults, limit);
} else {
- List<KeyValue> tmpList = new ArrayList<KeyValue>();
+ List<Cell> tmpList = new ArrayList<Cell>();
returnResult = nextInternal(tmpList, limit);
outResults.addAll(tmpList);
}
@@ -3562,7 +3563,10 @@ public class HRegion implements HeapSize
if (region != null && region.metricsRegion != null) {
long totalSize = 0;
if (outResults != null) {
- for(KeyValue kv:outResults) {
+ for(Cell c:outResults) {
+ // TODO clean up
+ KeyValue kv = KeyValueUtil.ensureKeyValue(c);
+
totalSize += kv.getLength();
}
}
@@ -3572,7 +3576,7 @@ public class HRegion implements HeapSize
}
- private void populateFromJoinedHeap(List<KeyValue> results, int limit)
+ private void populateFromJoinedHeap(List<Cell> results, int limit)
throws IOException {
assert joinedContinuationRow != null;
KeyValue kv = populateResult(results, this.joinedHeap, limit,
@@ -3597,7 +3601,7 @@ public class HRegion implements HeapSize
* @param length length for currentRow
* @return KV_LIMIT if limit reached, next KeyValue otherwise.
*/
- private KeyValue populateResult(List<KeyValue> results, KeyValueHeap heap, int limit,
+ private KeyValue populateResult(List<Cell> results, KeyValueHeap heap, int limit,
byte[] currentRow, int offset, short length) throws IOException {
KeyValue nextKv;
do {
@@ -3619,7 +3623,7 @@ public class HRegion implements HeapSize
return this.filter != null && this.filter.filterAllRemaining();
}
- private boolean nextInternal(List<KeyValue> results, int limit)
+ private boolean nextInternal(List<Cell> results, int limit)
throws IOException {
if (!results.isEmpty()) {
throw new IllegalArgumentException("First parameter should be an empty list");
@@ -3657,7 +3661,7 @@ public class HRegion implements HeapSize
// First, check if we are at a stop row. If so, there are no more results.
if (stopRow) {
if (filter != null && filter.hasFilterRow()) {
- filter.filterRow(results);
+ filter.filterRowCells(results);
}
return false;
}
@@ -3690,7 +3694,7 @@ public class HRegion implements HeapSize
// We have the part of the row necessary for filtering (all of it, usually).
// First filter with the filterRow(List).
if (filter != null && filter.hasFilterRow()) {
- filter.filterRow(results);
+ filter.filterRowCells(results);
}
if (isEmptyRow) {
boolean moreRows = nextRow(currentRow, offset, length);
@@ -4392,7 +4396,7 @@ public class HRegion implements HeapSize
get.addFamily(family);
}
}
- List<KeyValue> results = get(get, true);
+ List<Cell> results = get(get, true);
return new Result(results);
}
@@ -4401,10 +4405,10 @@ public class HRegion implements HeapSize
* @param withCoprocessor invoke coprocessor or not. We don't want to
* always invoke cp for this private method.
*/
- private List<KeyValue> get(Get get, boolean withCoprocessor)
+ private List<Cell> get(Get get, boolean withCoprocessor)
throws IOException {
- List<KeyValue> results = new ArrayList<KeyValue>();
+ List<Cell> results = new ArrayList<Cell>();
// pre-get CP hook
if (withCoprocessor && (coprocessorHost != null)) {
@@ -4433,8 +4437,8 @@ public class HRegion implements HeapSize
if (this.metricsRegion != null) {
long totalSize = 0l;
if (results != null) {
- for (KeyValue kv:results) {
- totalSize += kv.getLength();
+ for (Cell kv:results) {
+ totalSize += KeyValueUtil.ensureKeyValue(kv).getLength();
}
}
this.metricsRegion.updateGet(totalSize);
@@ -4711,8 +4715,8 @@ public class HRegion implements HeapSize
KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
get.addColumn(family.getKey(), kv.getQualifier());
}
- List<KeyValue> results = get(get, false);
-
+ List<Cell> results = get(get, false);
+
// Iterate the input columns and update existing values if they were
// found, otherwise add new column initialized to the append value
@@ -4724,9 +4728,8 @@ public class HRegion implements HeapSize
KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
KeyValue newKV;
if (idx < results.size()
- && results.get(idx).matchingQualifier(kv.getBuffer(),
- kv.getQualifierOffset(), kv.getQualifierLength())) {
- KeyValue oldKv = results.get(idx);
+ && CellUtil.matchingQualifier(results.get(idx),kv)) {
+ KeyValue oldKv = KeyValueUtil.ensureKeyValue(results.get(idx));
// allocate an empty kv once
newKV = new KeyValue(row.length, kv.getFamilyLength(),
kv.getQualifierLength(), now, KeyValue.Type.Put,
@@ -4886,19 +4889,17 @@ public class HRegion implements HeapSize
get.addColumn(family.getKey(), kv.getQualifier());
}
get.setTimeRange(tr.getMin(), tr.getMax());
- List<KeyValue> results = get(get, false);
-
+ List<Cell> results = get(get, false);
+
// Iterate the input columns and update existing values if they were
// found, otherwise add new column initialized to the increment amount
int idx = 0;
- for (Cell cell: family.getValue()) {
- KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
- long amount = Bytes.toLong(kv.getValue());
- byte [] qualifier = kv.getQualifier();
- if (idx < results.size() && results.get(idx).matchingQualifier(qualifier)) {
- kv = results.get(idx);
- if(kv.getValueLength() == Bytes.SIZEOF_LONG) {
- amount += Bytes.toLong(kv.getBuffer(), kv.getValueOffset(), Bytes.SIZEOF_LONG);
+ for (Cell kv: family.getValue()) {
+ long amount = Bytes.toLong(CellUtil.getValueArray(kv));
+ if (idx < results.size() && CellUtil.matchingQualifier(results.get(idx), kv)) {
+ Cell c = results.get(idx);
+ if(c.getValueLength() == Bytes.SIZEOF_LONG) {
+ amount += Bytes.toLong(c.getValueArray(), c.getValueOffset(), Bytes.SIZEOF_LONG);
} else {
// throw DoNotRetryIOException instead of IllegalArgumentException
throw new org.apache.hadoop.hbase.DoNotRetryIOException(
@@ -4909,7 +4910,7 @@ public class HRegion implements HeapSize
// Append new incremented KeyValue to list
KeyValue newKV =
- new KeyValue(row, family.getKey(), qualifier, now, Bytes.toBytes(amount));
+ new KeyValue(row, family.getKey(), CellUtil.getQualifierArray(kv), now, Bytes.toBytes(amount));
newKV.setMvccVersion(w.getWriteNumber());
kvs.add(newKV);
@@ -5167,7 +5168,7 @@ public class HRegion implements HeapSize
// scan.addFamily(HConstants.CATALOG_FAMILY);
RegionScanner scanner = region.getScanner(scan);
try {
- List<KeyValue> kvs = new ArrayList<KeyValue>();
+ List<Cell> kvs = new ArrayList<Cell>();
boolean done = false;
do {
kvs.clear();
@@ -5478,15 +5479,15 @@ public class HRegion implements HeapSize
/**
* A mocked list implementation - discards all updates.
*/
- private static final List<KeyValue> MOCKED_LIST = new AbstractList<KeyValue>() {
+ private static final List<Cell> MOCKED_LIST = new AbstractList<Cell>() {
@Override
- public void add(int index, KeyValue element) {
+ public void add(int index, Cell element) {
// do nothing
}
@Override
- public boolean addAll(int index, Collection<? extends KeyValue> c) {
+ public boolean addAll(int index, Collection<? extends Cell> c) {
return false; // this list is never changed as a result of an update
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Fri Aug 30 20:31:44 2013
@@ -55,25 +55,26 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Chore;
-import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.ClockOutOfSyncException;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-import org.apache.hadoop.hbase.ClockOutOfSyncException;
-import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HealthCheckChore;
+import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.NotServingRegionException;
-import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableDescriptors;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.YouAreDeadException;
import org.apache.hadoop.hbase.ZNodeClearer;
@@ -106,9 +107,9 @@ import org.apache.hadoop.hbase.ipc.Paylo
import org.apache.hadoop.hbase.ipc.RpcCallContext;
import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.ipc.RpcServer;
-import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.master.SplitLogManager;
import org.apache.hadoop.hbase.master.TableLockManager;
@@ -3059,8 +3060,9 @@ public class HRegionServer implements Cl
if (!results.isEmpty()) {
for (Result r : results) {
if (maxScannerResultSize < Long.MAX_VALUE){
- for (KeyValue kv : r.raw()) {
- currentScanResultSize += kv.heapSize();
+ for (Cell kv : r.raw()) {
+ // TODO
+ currentScanResultSize += KeyValueUtil.ensureKeyValue(kv).heapSize();
}
}
}
@@ -3075,7 +3077,7 @@ public class HRegionServer implements Cl
if (maxResultSize <= 0) {
maxResultSize = maxScannerResultSize;
}
- List<KeyValue> values = new ArrayList<KeyValue>();
+ List<Cell> values = new ArrayList<Cell>();
MultiVersionConsistencyControl.setThreadReadPoint(scanner.getMvccReadPoint());
region.startRegionOperation(Operation.SCAN);
try {
@@ -3087,8 +3089,8 @@ public class HRegionServer implements Cl
boolean moreRows = scanner.nextRaw(values);
if (!values.isEmpty()) {
if (maxScannerResultSize < Long.MAX_VALUE){
- for (KeyValue kv : values) {
- currentScanResultSize += kv.heapSize();
+ for (Cell kv : values) {
+ currentScanResultSize += KeyValueUtil.ensureKeyValue(kv).heapSize();
}
}
results.add(new Result(values));
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java Fri Aug 30 20:31:44 2013
@@ -18,13 +18,13 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.KeyValue;
-
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+
/**
* Internal scanners differ from client-side scanners in that they operate on
* HStoreKeys and byte[] instead of RowResults. This is because they are
@@ -47,7 +47,7 @@ public interface InternalScanner extends
* @return true if more rows exist after this one, false if scanner is done
* @throws IOException e
*/
- boolean next(List<KeyValue> results) throws IOException;
+ boolean next(List<Cell> results) throws IOException;
/**
* Grab the next row's worth of values with a limit on the number of values
@@ -57,7 +57,7 @@ public interface InternalScanner extends
* @return true if more rows exist after this one, false if scanner is done
* @throws IOException e
*/
- boolean next(List<KeyValue> result, int limit) throws IOException;
+ boolean next(List<Cell> result, int limit) throws IOException;
/**
* Closes the scanner and releases any resources it has allocated
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java Fri Aug 30 20:31:44 2013
@@ -25,6 +25,7 @@ import java.util.List;
import java.util.PriorityQueue;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
@@ -120,7 +121,7 @@ public class KeyValueHeap extends NonLaz
* @param limit
* @return true if there are more keys, false if all scanners are done
*/
- public boolean next(List<KeyValue> result, int limit) throws IOException {
+ public boolean next(List<Cell> result, int limit) throws IOException {
if (this.current == null) {
return false;
}
@@ -153,7 +154,7 @@ public class KeyValueHeap extends NonLaz
* @param result
* @return true if there are more keys, false if all scanners are done
*/
- public boolean next(List<KeyValue> result) throws IOException {
+ public boolean next(List<Cell> result) throws IOException {
return next(result, -1);
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java Fri Aug 30 20:31:44 2013
@@ -34,21 +34,21 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -786,7 +786,7 @@ public class RegionCoprocessorHost
* @return true if default processing should be bypassed
* @exception IOException Exception
*/
- public boolean preGet(final Get get, final List<KeyValue> results)
+ public boolean preGet(final Get get, final List<Cell> results)
throws IOException {
boolean bypass = false;
ObserverContext<RegionCoprocessorEnvironment> ctx = null;
@@ -794,7 +794,7 @@ public class RegionCoprocessorHost
if (env.getInstance() instanceof RegionObserver) {
ctx = ObserverContext.createAndPrepare(env, ctx);
try {
- ((RegionObserver)env.getInstance()).preGet(ctx, get, results);
+ ((RegionObserver)env.getInstance()).preGetOp(ctx, get, results);
} catch (Throwable e) {
handleCoprocessorThrowable(env, e);
}
@@ -809,17 +809,17 @@ public class RegionCoprocessorHost
/**
* @param get the Get request
- * @param results the result set
+ * @param results the result set
* @exception IOException Exception
*/
- public void postGet(final Get get, final List<KeyValue> results)
+ public void postGet(final Get get, final List<Cell> results)
throws IOException {
ObserverContext<RegionCoprocessorEnvironment> ctx = null;
for (RegionEnvironment env: coprocessors) {
if (env.getInstance() instanceof RegionObserver) {
ctx = ObserverContext.createAndPrepare(env, ctx);
try {
- ((RegionObserver)env.getInstance()).postGet(ctx, get, results);
+ ((RegionObserver)env.getInstance()).postGetOp(ctx, get, results);
} catch (Throwable e) {
handleCoprocessorThrowable(env, e);
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java Fri Aug 30 20:31:44 2013
@@ -22,8 +22,8 @@ import java.io.IOException;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
/**
@@ -74,7 +74,7 @@ public interface RegionScanner extends I
* @return true if more rows exist after this one, false if scanner is done
* @throws IOException e
*/
- boolean nextRaw(List<KeyValue> result) throws IOException;
+ boolean nextRaw(List<Cell> result) throws IOException;
/**
* Grab the next row's worth of values with a limit on the number of values
@@ -102,5 +102,5 @@ public interface RegionScanner extends I
* @return true if more rows exist after this one, false if scanner is done
* @throws IOException e
*/
- boolean nextRaw(List<KeyValue> result, int limit) throws IOException;
+ boolean nextRaw(List<Cell> result, int limit) throws IOException;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java Fri Aug 30 20:31:44 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.NavigableSet;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
@@ -303,7 +304,7 @@ public class ScanQueryMatcher {
* 7. Delete marker need to be version counted together with puts
* they affect
*/
- byte type = kv.getType();
+ byte type = kv.getTypeByte();
if (kv.isDelete()) {
if (!keepDeletedCells) {
// first ignore delete markers if the scanner can do so, and the
@@ -480,11 +481,11 @@ public class ScanQueryMatcher {
return this.filter;
}
- public KeyValue getNextKeyHint(KeyValue kv) throws IOException {
+ public Cell getNextKeyHint(Cell kv) throws IOException {
if (filter == null) {
return null;
} else {
- return filter.getNextKeyHint(kv);
+ return filter.getNextCellHint(kv);
}
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java Fri Aug 30 20:31:44 2013
@@ -30,8 +30,10 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.regionserver.compactions.Compactor;
@@ -117,16 +119,17 @@ s */
Compactor.CellSink sink, long smallestReadPoint) throws IOException {
int compactionKVMax =
conf.getInt(HConstants.COMPACTION_KV_MAX, HConstants.COMPACTION_KV_MAX_DEFAULT);
- List<KeyValue> kvs = new ArrayList<KeyValue>();
+ List<Cell> kvs = new ArrayList<Cell>();
boolean hasMore;
long flushed = 0;
do {
hasMore = scanner.next(kvs, compactionKVMax);
if (!kvs.isEmpty()) {
- for (KeyValue kv : kvs) {
+ for (Cell c : kvs) {
// If we know that this KV is going to be included always, then let us
// set its memstoreTS to 0. This will help us save space when writing to
// disk.
+ KeyValue kv = KeyValueUtil.ensureKeyValue(c);
if (kv.getMvccVersion() <= smallestReadPoint) {
// let us not change the original KV. It could be in the memstore
// changing its memstoreTS could affect other threads/scanners.
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java Fri Aug 30 20:31:44 2013
@@ -29,9 +29,11 @@ import java.util.concurrent.CountDownLat
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.filter.Filter;
@@ -358,7 +360,7 @@ public class StoreScanner extends NonLaz
* @return true if there are more rows, false if scanner is done
*/
@Override
- public synchronized boolean next(List<KeyValue> outResult, int limit) throws IOException {
+ public synchronized boolean next(List<Cell> outResult, int limit) throws IOException {
if (checkReseek()) {
return true;
}
@@ -410,7 +412,8 @@ public class StoreScanner extends NonLaz
Filter f = matcher.getFilter();
if (f != null) {
- kv = f.transform(kv);
+ // TODO convert Scan Query Matcher to be Cell instead of KV based ?
+ kv = KeyValueUtil.ensureKeyValue(f.transformCell(kv));
}
this.countPerRow++;
@@ -473,7 +476,8 @@ public class StoreScanner extends NonLaz
break;
case SEEK_NEXT_USING_HINT:
- KeyValue nextKV = matcher.getNextKeyHint(kv);
+ // TODO convert reseek to Cell?
+ KeyValue nextKV = KeyValueUtil.ensureKeyValue(matcher.getNextKeyHint(kv));
if (nextKV != null) {
reseek(nextKV);
} else {
@@ -496,7 +500,7 @@ public class StoreScanner extends NonLaz
}
@Override
- public synchronized boolean next(List<KeyValue> outResult) throws IOException {
+ public synchronized boolean next(List<Cell> outResult) throws IOException {
return next(outResult, -1);
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java Fri Aug 30 20:31:44 2013
@@ -28,8 +28,10 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.CellOutputStream;
import org.apache.hadoop.hbase.io.compress.Compression;
@@ -198,14 +200,15 @@ public abstract class Compactor {
int bytesWritten = 0;
// Since scanner.next() can return 'false' but still be delivering data,
// we have to use a do/while loop.
- List<KeyValue> kvs = new ArrayList<KeyValue>();
+ List<Cell> kvs = new ArrayList<Cell>();
// Limit to "hbase.hstore.compaction.kv.max" (default 10) to avoid OOME
int closeCheckInterval = HStore.getCloseCheckInterval();
boolean hasMore;
do {
hasMore = scanner.next(kvs, compactionKVMax);
// output to writer:
- for (KeyValue kv : kvs) {
+ for (Cell c : kvs) {
+ KeyValue kv = KeyValueUtil.ensureKeyValue(c);
if (kv.getMvccVersion() <= smallestReadPoint) {
kv.setMvccVersion(0);
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java Fri Aug 30 20:31:44 2013
@@ -18,14 +18,7 @@
*/
package org.apache.hadoop.hbase.rest;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.rest.ResourceBase;
-import org.apache.hadoop.hbase.rest.RowSpec;
-import org.apache.hadoop.hbase.rest.TableResource;
-import org.apache.hadoop.hbase.rest.model.CellModel;
-import org.apache.hadoop.hbase.rest.model.CellSetModel;
-import org.apache.hadoop.hbase.rest.model.RowModel;
+import java.io.IOException;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
@@ -33,7 +26,13 @@ import javax.ws.rs.core.Context;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
-import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
@InterfaceAudience.Private
public class MultiRowResource extends ResourceBase {
@@ -83,12 +82,13 @@ public class MultiRowResource extends Re
.build();
}
- KeyValue value = null;
+ Cell value = null;
RowModel rowModel = new RowModel(rk);
while ((value = generator.next()) != null) {
- rowModel.addCell(new CellModel(value.getFamily(), value.getQualifier(),
- value.getTimestamp(), value.getValue()));
+ rowModel.addCell(new CellModel(CellUtil.getFamilyArray(value),
+ CellUtil.getQualifierArray(value),
+ value.getTimestamp(), CellUtil.getValueArray(value)));
}
model.addRow(rowModel);
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java Fri Aug 30 20:31:44 2013
@@ -21,14 +21,14 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.Iterator;
-
+
import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.rest.model.ScannerModel;
@InterfaceAudience.Private
-public abstract class ResultGenerator implements Iterator<KeyValue> {
+public abstract class ResultGenerator implements Iterator<Cell> {
public static ResultGenerator fromRowSpec(final String table,
final RowSpec rowspec, final Filter filter) throws IOException {
@@ -43,7 +43,7 @@ public abstract class ResultGenerator im
return ScannerModel.buildFilter(filter);
}
- public abstract void putBack(KeyValue kv);
+ public abstract void putBack(Cell kv);
public abstract void close();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java Fri Aug 30 20:31:44 2013
@@ -38,6 +38,8 @@ import javax.ws.rs.core.UriInfo;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableNotFoundException;
@@ -96,17 +98,17 @@ public class RowResource extends Resourc
}
int count = 0;
CellSetModel model = new CellSetModel();
- KeyValue value = generator.next();
- byte[] rowKey = value.getRow();
+ Cell value = generator.next();
+ byte[] rowKey = CellUtil.getRowArray(value);
RowModel rowModel = new RowModel(rowKey);
do {
- if (!Bytes.equals(value.getRow(), rowKey)) {
+ if (!Bytes.equals(CellUtil.getRowArray(value), rowKey)) {
model.addRow(rowModel);
- rowKey = value.getRow();
+ rowKey = CellUtil.getRowArray(value);
rowModel = new RowModel(rowKey);
}
- rowModel.addCell(new CellModel(value.getFamily(), value.getQualifier(),
- value.getTimestamp(), value.getValue()));
+ rowModel.addCell(new CellModel(CellUtil.getFamilyArray(value), CellUtil.getQualifierArray(value),
+ value.getTimestamp(), CellUtil.getValueArray(value)));
if (++count > rowspec.getMaxValues()) {
break;
}
@@ -155,8 +157,8 @@ public class RowResource extends Resourc
.type(MIMETYPE_TEXT).entity("Not found" + CRLF)
.build();
}
- KeyValue value = generator.next();
- ResponseBuilder response = Response.ok(value.getValue());
+ Cell value = generator.next();
+ ResponseBuilder response = Response.ok(CellUtil.getValueArray(value));
response.header("X-Timestamp", value.getTimestamp());
servlet.getMetrics().incrementSucessfulGetRequests(1);
return response.build();
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java Fri Aug 30 20:31:44 2013
@@ -26,6 +26,7 @@ import java.util.NoSuchElementException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
@@ -38,8 +39,8 @@ import org.apache.hadoop.util.StringUtil
public class RowResultGenerator extends ResultGenerator {
private static final Log LOG = LogFactory.getLog(RowResultGenerator.class);
- private Iterator<KeyValue> valuesI;
- private KeyValue cache;
+ private Iterator<Cell> valuesI;
+ private Cell cache;
public RowResultGenerator(final String tableName, final RowSpec rowspec,
final Filter filter) throws IllegalArgumentException, IOException {
@@ -91,9 +92,9 @@ public class RowResultGenerator extends
return valuesI.hasNext();
}
- public KeyValue next() {
+ public Cell next() {
if (cache != null) {
- KeyValue kv = cache;
+ Cell kv = cache;
cache = null;
return kv;
}
@@ -107,7 +108,7 @@ public class RowResultGenerator extends
}
}
- public void putBack(KeyValue kv) {
+ public void putBack(Cell kv) {
this.cache = kv;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java Fri Aug 30 20:31:44 2013
@@ -35,6 +35,8 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
@@ -91,7 +93,7 @@ public class ScannerInstanceResource ext
}
int count = limit;
do {
- KeyValue value = null;
+ Cell value = null;
try {
value = generator.next();
} catch (IllegalStateException e) {
@@ -115,10 +117,10 @@ public class ScannerInstanceResource ext
break;
}
if (rowKey == null) {
- rowKey = value.getRow();
+ rowKey = CellUtil.getRowArray(value);
rowModel = new RowModel(rowKey);
}
- if (!Bytes.equals(value.getRow(), rowKey)) {
+ if (!Bytes.equals(CellUtil.getRowArray(value), rowKey)) {
// if maxRows was given as a query param, stop if we would exceed the
// specified number of rows
if (maxRows > 0) {
@@ -128,12 +130,12 @@ public class ScannerInstanceResource ext
}
}
model.addRow(rowModel);
- rowKey = value.getRow();
+ rowKey = CellUtil.getRowArray(value);
rowModel = new RowModel(rowKey);
}
rowModel.addCell(
- new CellModel(value.getFamily(), value.getQualifier(),
- value.getTimestamp(), value.getValue()));
+ new CellModel(CellUtil.getFamilyArray(value), CellUtil.getQualifierArray(value),
+ value.getTimestamp(), CellUtil.getValueArray(value)));
} while (--count > 0);
model.addRow(rowModel);
ResponseBuilder response = Response.ok(model);
@@ -151,17 +153,17 @@ public class ScannerInstanceResource ext
}
servlet.getMetrics().incrementRequests(1);
try {
- KeyValue value = generator.next();
+ Cell value = generator.next();
if (value == null) {
LOG.info("generator exhausted");
return Response.noContent().build();
}
- ResponseBuilder response = Response.ok(value.getValue());
+ ResponseBuilder response = Response.ok(CellUtil.getValueArray(value));
response.cacheControl(cacheControl);
- response.header("X-Row", Base64.encodeBytes(value.getRow()));
+ response.header("X-Row", Base64.encodeBytes(CellUtil.getRowArray(value)));
response.header("X-Column",
Base64.encodeBytes(
- KeyValue.makeColumn(value.getFamily(), value.getQualifier())));
+ KeyValue.makeColumn(CellUtil.getFamilyArray(value), CellUtil.getQualifierArray(value))));
response.header("X-Timestamp", value.getTimestamp());
servlet.getMetrics().incrementSucessfulGetRequests(1);
return response.build();
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java Fri Aug 30 20:31:44 2013
@@ -25,6 +25,7 @@ import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.HTableInterface;
@@ -51,8 +52,8 @@ public class ScannerResultGenerator exte
}
private String id;
- private Iterator<KeyValue> rowI;
- private KeyValue cache;
+ private Iterator<Cell> rowI;
+ private Cell cache;
private ResultScanner scanner;
private Result cached;
@@ -131,9 +132,9 @@ public class ScannerResultGenerator exte
return cached != null;
}
- public KeyValue next() {
+ public Cell next() {
if (cache != null) {
- KeyValue kv = cache;
+ Cell kv = cache;
cache = null;
return kv;
}
@@ -169,7 +170,7 @@ public class ScannerResultGenerator exte
return null;
}
- public void putBack(KeyValue kv) {
+ public void putBack(Cell kv) {
this.cache = kv;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java Fri Aug 30 20:31:44 2013
@@ -168,7 +168,7 @@ public class RemoteHTable implements HTa
protected Result[] buildResultFromModel(final CellSetModel model) {
List<Result> results = new ArrayList<Result>();
for (RowModel row: model.getRows()) {
- List<KeyValue> kvs = new ArrayList<KeyValue>();
+ List<Cell> kvs = new ArrayList<Cell>();
for (CellModel cell: row.getCells()) {
byte[][] split = KeyValue.parseColumn(cell.getColumn());
byte[] column = split[0];
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java Fri Aug 30 20:31:44 2013
@@ -18,6 +18,8 @@
package org.apache.hadoop.hbase.security.access;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.KeyValue;
@@ -59,7 +61,9 @@ class AccessControlFilter extends Filter
}
@Override
- public ReturnCode filterKeyValue(KeyValue kv) {
+ public ReturnCode filterKeyValue(Cell c) {
+ // TODO go and redo auth manager to use Cell instead of KV.
+ KeyValue kv = KeyValueUtil.ensureKeyValue(c);
if (authManager.authorize(user, table, kv, TablePermission.Action.READ)) {
return ReturnCode.INCLUDE;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java Fri Aug 30 20:31:44 2013
@@ -33,13 +33,14 @@ import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
@@ -48,6 +49,7 @@ import org.apache.hadoop.hbase.client.Pu
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
@@ -367,14 +369,14 @@ public class AccessControlLists {
iScanner = aclRegion.getScanner(scan);
while (true) {
- List<KeyValue> row = new ArrayList<KeyValue>();
+ List<Cell> row = new ArrayList<Cell>();
boolean hasNext = iScanner.next(row);
ListMultimap<String,TablePermission> perms = ArrayListMultimap.create();
byte[] entry = null;
- for (KeyValue kv : row) {
+ for (Cell kv : row) {
if (entry == null) {
- entry = kv.getRow();
+ entry = CellUtil.getRowArray(kv);
}
Pair<String,TablePermission> permissionsOfUserOnTable =
parsePermissionRecord(entry, kv);
@@ -511,7 +513,7 @@ public class AccessControlLists {
byte[] entryName, Result result) {
ListMultimap<String, TablePermission> perms = ArrayListMultimap.create();
if (result != null && result.size() > 0) {
- for (KeyValue kv : result.raw()) {
+ for (Cell kv : result.raw()) {
Pair<String,TablePermission> permissionsOfUserOnTable =
parsePermissionRecord(entryName, kv);
@@ -527,16 +529,16 @@ public class AccessControlLists {
}
private static Pair<String, TablePermission> parsePermissionRecord(
- byte[] entryName, KeyValue kv) {
+ byte[] entryName, Cell kv) {
// return X given a set of permissions encoded in the permissionRecord kv.
- byte[] family = kv.getFamily();
+ byte[] family = CellUtil.getFamilyArray(kv);
if (!Bytes.equals(family, ACL_LIST_FAMILY)) {
return null;
}
- byte[] key = kv.getQualifier();
- byte[] value = kv.getValue();
+ byte[] key = CellUtil.getQualifierArray(kv);
+ byte[] value = CellUtil.getValueArray(kv);
if (LOG.isDebugEnabled()) {
LOG.debug("Read acl: kv ["+
Bytes.toStringBinary(key)+": "+
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java Fri Aug 30 20:31:44 2013
@@ -945,8 +945,8 @@ public class AccessController extends Ba
}
@Override
- public void preGet(final ObserverContext<RegionCoprocessorEnvironment> c,
- final Get get, final List<KeyValue> result) throws IOException {
+ public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> c,
+ final Get get, final List<Cell> result) throws IOException {
/*
if column family level checks fail, check for a qualifier level permission
in one of the families. If it is present, then continue with the AccessControlFilter.
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java Fri Aug 30 20:31:44 2013
@@ -24,6 +24,8 @@ import java.util.List;
import java.util.TreeMap;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Increment;
@@ -33,8 +35,8 @@ import org.apache.hadoop.hbase.regionser
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
import org.apache.hadoop.hbase.thrift.generated.TCell;
-import org.apache.hadoop.hbase.thrift.generated.TIncrement;
import org.apache.hadoop.hbase.thrift.generated.TColumn;
+import org.apache.hadoop.hbase.thrift.generated.TIncrement;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.util.Bytes;
@@ -98,10 +100,10 @@ public class ThriftUtilities {
* Hbase Cell object
* @return Thrift TCell array
*/
- static public List<TCell> cellFromHBase(KeyValue in) {
+ static public List<TCell> cellFromHBase(Cell in) {
List<TCell> list = new ArrayList<TCell>(1);
if (in != null) {
- list.add(new TCell(ByteBuffer.wrap(in.getValue()), in.getTimestamp()));
+ list.add(new TCell(ByteBuffer.wrap(CellUtil.getValueArray(in)), in.getTimestamp()));
}
return list;
}
@@ -112,12 +114,12 @@ public class ThriftUtilities {
* @param in Hbase Cell array
* @return Thrift TCell array
*/
- static public List<TCell> cellFromHBase(KeyValue[] in) {
+ static public List<TCell> cellFromHBase(Cell[] in) {
List<TCell> list = null;
if (in != null) {
list = new ArrayList<TCell>(in.length);
for (int i = 0; i < in.length; i++) {
- list.add(new TCell(ByteBuffer.wrap(in[i].getValue()), in[i].getTimestamp()));
+ list.add(new TCell(ByteBuffer.wrap(CellUtil.getValueArray(in[i])), in[i].getTimestamp()));
}
} else {
list = new ArrayList<TCell>(0);
@@ -150,19 +152,19 @@ public class ThriftUtilities {
result.row = ByteBuffer.wrap(result_.getRow());
if (sortColumns) {
result.sortedColumns = new ArrayList<TColumn>();
- for (KeyValue kv : result_.raw()) {
+ for (Cell kv : result_.raw()) {
result.sortedColumns.add(new TColumn(
- ByteBuffer.wrap(KeyValue.makeColumn(kv.getFamily(),
- kv.getQualifier())),
- new TCell(ByteBuffer.wrap(kv.getValue()), kv.getTimestamp())));
+ ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.getFamilyArray(kv),
+ CellUtil.getQualifierArray(kv))),
+ new TCell(ByteBuffer.wrap(CellUtil.getValueArray(kv)), kv.getTimestamp())));
}
} else {
result.columns = new TreeMap<ByteBuffer, TCell>();
- for (KeyValue kv : result_.raw()) {
+ for (Cell kv : result_.raw()) {
result.columns.put(
- ByteBuffer.wrap(KeyValue.makeColumn(kv.getFamily(),
- kv.getQualifier())),
- new TCell(ByteBuffer.wrap(kv.getValue()), kv.getTimestamp()));
+ ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.getFamilyArray(kv),
+ CellUtil.getQualifierArray(kv))),
+ new TCell(ByteBuffer.wrap(CellUtil.getValueArray(kv)), kv.getTimestamp()));
}
}
results.add(result);
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java Fri Aug 30 20:31:44 2013
@@ -18,21 +18,46 @@
*/
package org.apache.hadoop.hbase.thrift2;
+import static org.apache.hadoop.hbase.util.Bytes.getBytes;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Increment;
+import org.apache.hadoop.hbase.client.OperationWithAttributes;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ParseFilter;
-import org.apache.hadoop.hbase.thrift2.generated.*;
+import org.apache.hadoop.hbase.thrift2.generated.TColumn;
+import org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement;
+import org.apache.hadoop.hbase.thrift2.generated.TColumnValue;
+import org.apache.hadoop.hbase.thrift2.generated.TDelete;
+import org.apache.hadoop.hbase.thrift2.generated.TDeleteType;
+import org.apache.hadoop.hbase.thrift2.generated.TDurability;
+import org.apache.hadoop.hbase.thrift2.generated.TGet;
+import org.apache.hadoop.hbase.thrift2.generated.TIncrement;
+import org.apache.hadoop.hbase.thrift2.generated.TMutation;
+import org.apache.hadoop.hbase.thrift2.generated.TPut;
+import org.apache.hadoop.hbase.thrift2.generated.TResult;
+import org.apache.hadoop.hbase.thrift2.generated.TRowMutations;
+import org.apache.hadoop.hbase.thrift2.generated.TScan;
+import org.apache.hadoop.hbase.thrift2.generated.TTimeRange;
import org.apache.hadoop.hbase.util.Bytes;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.*;
-
-import static org.apache.hadoop.hbase.util.Bytes.getBytes;
-
@InterfaceAudience.Private
public class ThriftUtilities {
@@ -115,19 +140,19 @@ public class ThriftUtilities {
* @return converted result, returns an empty result if the input is <code>null</code>
*/
public static TResult resultFromHBase(Result in) {
- KeyValue[] raw = in.raw();
+ Cell[] raw = in.raw();
TResult out = new TResult();
byte[] row = in.getRow();
if (row != null) {
out.setRow(in.getRow());
}
List<TColumnValue> columnValues = new ArrayList<TColumnValue>();
- for (KeyValue kv : raw) {
+ for (Cell kv : raw) {
TColumnValue col = new TColumnValue();
- col.setFamily(kv.getFamily());
- col.setQualifier(kv.getQualifier());
+ col.setFamily(CellUtil.getFamilyArray(kv));
+ col.setQualifier(CellUtil.getQualifierArray(kv));
col.setTimestamp(kv.getTimestamp());
- col.setValue(kv.getValue());
+ col.setValue(CellUtil.getValueArray(kv));
columnValues.add(col);
}
out.setColumnValues(columnValues);
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java Fri Aug 30 20:31:44 2013
@@ -56,8 +56,8 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -67,6 +67,7 @@ import org.apache.hadoop.hbase.HTableDes
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.catalog.MetaEditor;
import org.apache.hadoop.hbase.client.Delete;
@@ -2558,8 +2559,8 @@ public class HBaseFsck extends Configure
int countRecord = 1;
// comparator to sort KeyValues with latest modtime
- final Comparator<KeyValue> comp = new Comparator<KeyValue>() {
- public int compare(KeyValue k1, KeyValue k2) {
+ final Comparator<Cell> comp = new Comparator<Cell>() {
+ public int compare(Cell k1, Cell k2) {
return (int)(k1.getTimestamp() - k2.getTimestamp());
}
};
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java Fri Aug 30 20:31:44 2013
@@ -40,7 +40,8 @@ import org.apache.hadoop.hbase.client.Re
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
-import org.apache.hadoop.hbase.util.*;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hdfs.MiniDFSCluster;
/**
@@ -514,7 +515,7 @@ public abstract class HBaseTestCase exte
public interface ScannerIncommon
extends Iterable<Result> {
- boolean next(List<KeyValue> values)
+ boolean next(List<Cell> values)
throws IOException;
void close() throws IOException;
@@ -526,7 +527,8 @@ public abstract class HBaseTestCase exte
this.scanner = scanner;
}
- public boolean next(List<KeyValue> values)
+ @Override
+ public boolean next(List<Cell> values)
throws IOException {
Result results = scanner.next();
if (results == null) {
@@ -542,7 +544,7 @@ public abstract class HBaseTestCase exte
}
@SuppressWarnings("unchecked")
- public Iterator iterator() {
+ public Iterator<Result> iterator() {
return scanner.iterator();
}
}
@@ -554,15 +556,18 @@ public abstract class HBaseTestCase exte
this.scanner = scanner;
}
- public boolean next(List<KeyValue> results)
+ @Override
+ public boolean next(List<Cell> results)
throws IOException {
return scanner.next(results);
}
+ @Override
public void close() throws IOException {
scanner.close();
}
+ @Override
public Iterator<Result> iterator() {
throw new UnsupportedOperationException();
}
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java Fri Aug 30 20:31:44 2013
@@ -2677,19 +2677,19 @@ public class HBaseTestingUtility extends
* Do a small get/scan against one store. This is required because store
* has no actual methods of querying itself, and relies on StoreScanner.
*/
- public static List<KeyValue> getFromStoreFile(HStore store,
+ public static List<Cell> getFromStoreFile(HStore store,
Get get) throws IOException {
MultiVersionConsistencyControl.resetThreadReadPoint();
Scan scan = new Scan(get);
InternalScanner scanner = (InternalScanner) store.getScanner(scan,
scan.getFamilyMap().get(store.getFamily().getName()));
- List<KeyValue> result = new ArrayList<KeyValue>();
+ List<Cell> result = new ArrayList<Cell>();
scanner.next(result);
if (!result.isEmpty()) {
// verify that we are on the row we want:
- KeyValue kv = result.get(0);
- if (!Bytes.equals(kv.getRow(), get.getRow())) {
+ Cell kv = result.get(0);
+ if (!CellUtil.matchingRow(kv, get.getRow())) {
result.clear();
}
}
@@ -2720,7 +2720,7 @@ public class HBaseTestingUtility extends
* Do a small get/scan against one store. This is required because store
* has no actual methods of querying itself, and relies on StoreScanner.
*/
- public static List<KeyValue> getFromStoreFile(HStore store,
+ public static List<Cell> getFromStoreFile(HStore store,
byte [] row,
NavigableSet<byte[]> columns
) throws IOException {
@@ -2781,8 +2781,8 @@ public class HBaseTestingUtility extends
}
public static void assertKVListsEqual(String additionalMsg,
- final List<KeyValue> expected,
- final List<KeyValue> actual) {
+ final List<? extends Cell> expected,
+ final List<? extends Cell> actual) {
final int eLen = expected.size();
final int aLen = actual.size();
final int minLen = Math.min(eLen, aLen);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java Fri Aug 30 20:31:44 2013
@@ -173,10 +173,10 @@ public class TestAcidGuarantees implemen
msg.append("Failed after ").append(numVerified).append("!");
msg.append("Expected=").append(Bytes.toStringBinary(expected));
msg.append("Got:\n");
- for (KeyValue kv : res.list()) {
+ for (Cell kv : res.list()) {
msg.append(kv.toString());
msg.append(" val= ");
- msg.append(Bytes.toStringBinary(kv.getValue()));
+ msg.append(Bytes.toStringBinary(CellUtil.getValueArray(kv)));
msg.append("\n");
}
throw new RuntimeException(msg.toString());
@@ -230,10 +230,10 @@ public class TestAcidGuarantees implemen
msg.append("Failed after ").append(numRowsScanned).append("!");
msg.append("Expected=").append(Bytes.toStringBinary(expected));
msg.append("Got:\n");
- for (KeyValue kv : res.list()) {
+ for (Cell kv : res.list()) {
msg.append(kv.toString());
msg.append(" val= ");
- msg.append(Bytes.toStringBinary(kv.getValue()));
+ msg.append(Bytes.toStringBinary(CellUtil.getValueArray(kv)));
msg.append("\n");
}
throw new RuntimeException(msg.toString());
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java Fri Aug 30 20:31:44 2013
@@ -236,7 +236,7 @@ public class TestMultiVersions {
get.setTimeStamp(timestamp[j]);
Result result = table.get(get);
int cellCount = 0;
- for(@SuppressWarnings("unused")KeyValue kv : result.list()) {
+ for(@SuppressWarnings("unused")Cell kv : result.list()) {
cellCount++;
}
assertTrue(cellCount == 1);
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java Fri Aug 30 20:31:44 2013
@@ -107,7 +107,7 @@ public class TimestampTestBase extends H
get.setMaxVersions(3);
Result result = incommon.get(get);
assertEquals(1, result.size());
- long time = Bytes.toLong(result.raw()[0].getValue());
+ long time = Bytes.toLong(CellUtil.getValueArray(result.raw()[0]));
assertEquals(time, currentTime);
}
@@ -136,10 +136,10 @@ public class TimestampTestBase extends H
get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
get.setMaxVersions(tss.length);
Result result = incommon.get(get);
- KeyValue [] kvs = result.raw();
+ Cell [] kvs = result.raw();
assertEquals(kvs.length, tss.length);
for(int i=0;i<kvs.length;i++) {
- t = Bytes.toLong(kvs[i].getValue());
+ t = Bytes.toLong(CellUtil.getValueArray(kvs[i]));
assertEquals(tss[i], t);
}
@@ -155,7 +155,7 @@ public class TimestampTestBase extends H
kvs = result.raw();
assertEquals(kvs.length, tss.length - 1);
for(int i=1;i<kvs.length;i++) {
- t = Bytes.toLong(kvs[i-1].getValue());
+ t = Bytes.toLong(CellUtil.getValueArray(kvs[i-1]));
assertEquals(tss[i], t);
}
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/MetaMockingUtil.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/MetaMockingUtil.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/MetaMockingUtil.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/MetaMockingUtil.java Fri Aug 30 20:31:44 2013
@@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
@@ -72,7 +73,7 @@ public class MetaMockingUtil {
*/
public static Result getMetaTableRowResult(HRegionInfo region, final ServerName sn,
HRegionInfo splita, HRegionInfo splitb) throws IOException {
- List<KeyValue> kvs = new ArrayList<KeyValue>();
+ List<Cell> kvs = new ArrayList<Cell>();
if (region != null) {
kvs.add(new KeyValue(
region.getRegionName(),
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java?rev=1519076&r1=1519075&r2=1519076&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java Fri Aug 30 20:31:44 2013
@@ -30,6 +30,7 @@ import java.util.NavigableMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
@@ -97,7 +98,7 @@ public class TestMetaReaderEditorNoClust
public void testGetHRegionInfo() throws IOException {
assertNull(HRegionInfo.getHRegionInfo(new Result()));
- List<KeyValue> kvs = new ArrayList<KeyValue>();
+ List<Cell> kvs = new ArrayList<Cell>();
Result r = new Result(kvs);
assertNull(HRegionInfo.getHRegionInfo(r));
@@ -152,7 +153,7 @@ public class TestMetaReaderEditorNoClust
// show. We will know if they happened or not because we will ask
// mockito at the end of this test to verify that scan was indeed
// called the wanted number of times.
- List<KeyValue> kvs = new ArrayList<KeyValue>();
+ List<Cell> kvs = new ArrayList<Cell>();
final byte [] rowToVerify = Bytes.toBytes("rowToVerify");
kvs.add(new KeyValue(rowToVerify,
HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER,