Posted to commits@omid.apache.org by oh...@apache.org on 2018/06/10 09:02:00 UTC

incubator-omid git commit: [OMID-77] Fix coprocessor for HBase 0.9x and 1.x (#1)

Repository: incubator-omid
Updated Branches:
  refs/heads/master 1afeb7dc5 -> 0b5a7b597


[OMID-77] Fix coprocessor for HBase 0.9x and 1.x (#1)

The original implementation of the shims for HBase 0.9x/1.x was introduced here:
https://github.com/yahoo/omid/pull/37/commits

Change-Id: I603c7658132b41b17963ddceecae6a19bc4a6202

Project: http://git-wip-us.apache.org/repos/asf/incubator-omid/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-omid/commit/0b5a7b59
Tree: http://git-wip-us.apache.org/repos/asf/incubator-omid/tree/0b5a7b59
Diff: http://git-wip-us.apache.org/repos/asf/incubator-omid/diff/0b5a7b59

Branch: refs/heads/master
Commit: 0b5a7b5971d6ca133882b2d1106899d19e59f2ac
Parents: 1afeb7d
Author: Francisco Perez-Sorrosal <fp...@yahoo-inc.com>
Authored: Tue Oct 17 15:49:11 2017 -0700
Committer: Ohad Shacham <oh...@yahoo-inc.com>
Committed: Sun Jun 10 12:01:53 2018 +0300

----------------------------------------------------------------------
 .../java/org/apache/omid/ReflectionHelper.java  |  34 +++
 .../org/apache/omid/TestReflectionHelper.java   |  51 ++++
 .../hbase/regionserver/CompactorScanner.java    | 286 ------------------
 .../omid/transaction/CompactorScanner.java      | 288 +++++++++++++++++++
 .../apache/omid/transaction/OmidCompactor.java  |  25 +-
 .../omid/transaction/TestCompactorScanner.java  |   1 -
 .../hadoop/hbase/regionserver/Region.java       |  47 ---
 .../hbase/regionserver/ScannerContext.java      |   2 +
 .../main/java/org/apache/omid/HBaseShims.java   |  12 +-
 .../java/org/apache/omid/RegionWrapper.java     |  52 ++++
 hbase-shims/hbase-1/pom.xml                     |  14 +
 .../main/java/org/apache/omid/HBaseShims.java   |  21 +-
 .../java/org/apache/omid/RegionWrapper.java     |  52 ++++
 pom.xml                                         |   2 +-
 14 files changed, 534 insertions(+), 353 deletions(-)
----------------------------------------------------------------------
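
For orientation: the fix moves CompactorScanner out of the HBase-internal package and routes every version-specific call through the shim layer (RegionWrapper plus the new HBaseShims methods added below). A minimal sketch of the resulting call pattern, assembled from the diffs that follow (the wrapper class name, helper method and row value here are illustrative only, and selecting the hbase-0 vs hbase-1 shim module is assumed to happen at build time):

    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    import org.apache.omid.HBaseShims;
    import org.apache.omid.RegionWrapper;

    import java.io.IOException;

    public class ShimUsageSketch {

        // Coprocessor code asks the shim for a RegionWrapper instead of holding an
        // HRegion (HBase 0.9x) or Region (HBase 1.x) reference directly.
        static Result readRow(RegionCoprocessorEnvironment env, byte[] row) throws IOException {
            RegionWrapper regionWrapper = HBaseShims.getRegionCoprocessorRegion(env);
            return regionWrapper.get(new Get(row));
        }
    }

On 0.9x the wrapper delegates to HRegion and on 1.x to the Region interface, while the batch limit of a ScannerContext is reachable only through HBaseShims.getBatchLimit, which the 0.9x shim deliberately rejects.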


http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/common/src/main/java/org/apache/omid/ReflectionHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/omid/ReflectionHelper.java b/common/src/main/java/org/apache/omid/ReflectionHelper.java
new file mode 100644
index 0000000..bd1b158
--- /dev/null
+++ b/common/src/main/java/org/apache/omid/ReflectionHelper.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.omid;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+public class ReflectionHelper {
+
+    static public <T> Object invokeParameterlessMethod(T theObject, String methodName)
+            throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+
+        Method method = theObject.getClass().getDeclaredMethod(methodName, null);
+        method.setAccessible(true);
+        return method.invoke(theObject, null);
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/common/src/test/java/org/apache/omid/TestReflectionHelper.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/omid/TestReflectionHelper.java b/common/src/test/java/org/apache/omid/TestReflectionHelper.java
new file mode 100644
index 0000000..51febad
--- /dev/null
+++ b/common/src/test/java/org/apache/omid/TestReflectionHelper.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.omid;
+
+import org.testng.annotations.Test;
+
+import static org.testng.Assert.assertEquals;
+
+public class TestReflectionHelper {
+
+    public static class TestClass {
+
+        private final int theNumber;
+
+        public TestClass(int aNumber) {
+            this.theNumber = aNumber;
+        }
+        
+        private int getTheNumber() {
+            return theNumber;
+        }
+
+    }
+
+    @Test(timeOut = 1_000)
+    public void testReflectionWorksForAPrivateMethodReturningAnInt() throws Exception {
+
+        final int expectedInt = 10;
+        TestClass testClass = new TestClass(expectedInt);
+
+        int result = (int) ReflectionHelper.invokeParameterlessMethod(testClass, "getTheNumber");
+        assertEquals(result, expectedInt);
+
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-coprocessor/src/main/java/org/apache/hadoop/hbase/regionserver/CompactorScanner.java
----------------------------------------------------------------------
diff --git a/hbase-coprocessor/src/main/java/org/apache/hadoop/hbase/regionserver/CompactorScanner.java b/hbase-coprocessor/src/main/java/org/apache/hadoop/hbase/regionserver/CompactorScanner.java
deleted file mode 100644
index 12ecbb0..0000000
--- a/hbase-coprocessor/src/main/java/org/apache/hadoop/hbase/regionserver/CompactorScanner.java
+++ /dev/null
@@ -1,286 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.regionserver;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Optional;
-import com.google.common.collect.Iterators;
-import com.google.common.collect.PeekingIterator;
-import org.apache.omid.HBaseShims;
-import org.apache.omid.committable.CommitTable;
-import org.apache.omid.committable.CommitTable.Client;
-import org.apache.omid.committable.CommitTable.CommitTimestamp;
-import org.apache.omid.transaction.CellUtils;
-import org.apache.omid.transaction.CellInfo;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Queue;
-import java.util.SortedMap;
-import java.util.concurrent.ExecutionException;
-
-import static org.apache.omid.committable.CommitTable.CommitTimestamp.Location.SHADOW_CELL;
-
-public class CompactorScanner implements InternalScanner {
-    private static final Logger LOG = LoggerFactory.getLogger(CompactorScanner.class);
-    private final InternalScanner internalScanner;
-    private final CommitTable.Client commitTableClient;
-    private final Queue<CommitTable.Client> commitTableClientQueue;
-    private final boolean isMajorCompaction;
-    private final boolean retainNonTransactionallyDeletedCells;
-    private final long lowWatermark;
-
-    private final Region hRegion;
-
-    private boolean hasMoreRows = false;
-    private List<Cell> currentRowWorthValues = new ArrayList<Cell>();
-
-    public CompactorScanner(ObserverContext<RegionCoprocessorEnvironment> e,
-                            InternalScanner internalScanner,
-                            Client commitTableClient,
-                            Queue<CommitTable.Client> commitTableClientQueue,
-                            boolean isMajorCompaction,
-                            boolean preserveNonTransactionallyDeletedCells) throws IOException {
-        this.internalScanner = internalScanner;
-        this.commitTableClient = commitTableClient;
-        this.commitTableClientQueue = commitTableClientQueue;
-        this.isMajorCompaction = isMajorCompaction;
-        this.retainNonTransactionallyDeletedCells = preserveNonTransactionallyDeletedCells;
-        this.lowWatermark = getLowWatermarkFromCommitTable();
-        // Obtain the table in which the scanner is going to operate
-        this.hRegion = HBaseShims.getRegionCoprocessorRegion(e.getEnvironment());
-        LOG.info("Scanner cleaning up uncommitted txs older than LW [{}] in region [{}]",
-                lowWatermark, hRegion.getRegionInfo());
-    }
-
-    @Override
-    public boolean next(List<Cell> results) throws IOException {
-        return next(results, -1);
-    }
-
-    public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
-        int limit = scannerContext.getBatchLimit();
-        return next(result, limit);
-    }
-
-    public boolean next(List<Cell> result, int limit) throws IOException {
-
-        if (currentRowWorthValues.isEmpty()) {
-            // 1) Read next row
-            List<Cell> scanResult = new ArrayList<Cell>();
-            hasMoreRows = internalScanner.next(scanResult);
-            if (LOG.isTraceEnabled()) {
-                LOG.trace("Row: Result {} limit {} more rows? {}", scanResult, limit, hasMoreRows);
-            }
-            // 2) Traverse result list separating normal cells from shadow
-            // cells and building a map to access easily the shadow cells.
-            SortedMap<Cell, Optional<Cell>> cellToSc = CellUtils.mapCellsToShadowCells(scanResult);
-
-            // 3) traverse the list of row key values isolated before and
-            // check which ones should be discarded
-            Map<String, CellInfo> lastTimestampedCellsInRow = new HashMap<>();
-            PeekingIterator<Map.Entry<Cell, Optional<Cell>>> iter
-                    = Iterators.peekingIterator(cellToSc.entrySet().iterator());
-            while (iter.hasNext()) {
-                Map.Entry<Cell, Optional<Cell>> entry = iter.next();
-                Cell cell = entry.getKey();
-                Optional<Cell> shadowCellOp = entry.getValue();
-
-                if (cell.getTimestamp() > lowWatermark) {
-                    retain(currentRowWorthValues, cell, shadowCellOp);
-                    continue;
-                }
-
-                if (shouldRetainNonTransactionallyDeletedCell(cell)) {
-                    retain(currentRowWorthValues, cell, shadowCellOp);
-                    continue;
-                }
-
-                // During a minor compaction the coprocessor may only see a
-                // subset of store files and may not have the all the versions
-                // of a cell available for consideration. Therefore, if it
-                // deletes a cell with a tombstone during a minor compaction,
-                // an older version of the cell may become visible again. So,
-                // we have to remove tombstones only in major compactions.
-                if (isMajorCompaction) {
-                    if (CellUtils.isTombstone(cell)) {
-                        if (shadowCellOp.isPresent()) {
-                            skipToNextColumn(cell, iter);
-                        } else {
-                            Optional<CommitTimestamp> commitTimestamp = queryCommitTimestamp(cell);
-                            // Clean the cell only if it is valid
-                            if (commitTimestamp.isPresent() && commitTimestamp.get().isValid()) {
-                                skipToNextColumn(cell, iter);
-                            }
-                        }
-                        continue;
-                    }
-                }
-
-                if (shadowCellOp.isPresent()) {
-                    saveLastTimestampedCell(lastTimestampedCellsInRow, cell, shadowCellOp.get());
-                } else {
-                    Optional<CommitTimestamp> commitTimestamp = queryCommitTimestamp(cell);
-                    if (commitTimestamp.isPresent() && commitTimestamp.get().isValid()) {
-                        // Build the missing shadow cell...
-                        byte[] shadowCellValue = Bytes.toBytes(commitTimestamp.get().getValue());
-                        Cell shadowCell = CellUtils.buildShadowCellFromCell(cell, shadowCellValue);
-                        saveLastTimestampedCell(lastTimestampedCellsInRow, cell, shadowCell);
-                    } else {
-                        LOG.trace("Discarding cell {}", cell);
-                    }
-                }
-            }
-            retainLastTimestampedCellsSaved(currentRowWorthValues, lastTimestampedCellsInRow);
-
-            // 4) Sort the list
-            Collections.sort(currentRowWorthValues, KeyValue.COMPARATOR);
-        }
-
-        // Chomp current row worth values up to the limit
-        if (currentRowWorthValues.size() <= limit) {
-            result.addAll(currentRowWorthValues);
-            currentRowWorthValues.clear();
-        } else {
-            result.addAll(currentRowWorthValues.subList(0, limit));
-            currentRowWorthValues.subList(0, limit).clear();
-        }
-        LOG.trace("Results to preserve {}", result);
-
-        return hasMoreRows;
-    }
-
-    @Override
-    public void close() throws IOException {
-        internalScanner.close();
-        commitTableClientQueue.add(commitTableClient);
-    }
-
-    // ----------------------------------------------------------------------------------------------------------------
-    // Helper methods
-    // ----------------------------------------------------------------------------------------------------------------
-
-    @VisibleForTesting
-    public boolean shouldRetainNonTransactionallyDeletedCell(Cell cell) {
-        return (CellUtil.isDelete(cell) || CellUtil.isDeleteFamily(cell))
-                &&
-                retainNonTransactionallyDeletedCells;
-    }
-
-    private void saveLastTimestampedCell(Map<String, CellInfo> lastCells, Cell cell, Cell shadowCell) {
-        String cellKey = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength())
-                + ":"
-                + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
-        LOG.trace("Cell Key: {}", cellKey);
-
-        if (!lastCells.containsKey(cellKey)) {
-            lastCells.put(cellKey, new CellInfo(cell, shadowCell));
-        } else {
-            if (lastCells.get(cellKey).getTimestamp() < cell.getTimestamp()) {
-                lastCells.put(cellKey, new CellInfo(cell, shadowCell));
-            } else {
-                LOG.trace("Forgetting old cell {}", cell);
-            }
-        }
-    }
-
-    private long getLowWatermarkFromCommitTable() throws IOException {
-        try {
-            LOG.trace("About to read log watermark from commit table");
-            return commitTableClient.readLowWatermark().get();
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-            LOG.warn("Interrupted getting low watermark from commit table", ie);
-            throw new IOException("Interrupted getting low watermark from commit table");
-        } catch (ExecutionException ee) {
-            LOG.warn("Problem getting low watermark from commit table");
-            throw new IOException("Problem getting low watermark from commit table", ee.getCause());
-        }
-    }
-
-    private Optional<CommitTimestamp> queryCommitTimestamp(Cell cell) throws IOException {
-        try {
-            Optional<CommitTimestamp> ct = commitTableClient.getCommitTimestamp(cell.getTimestamp()).get();
-            if (ct.isPresent()) {
-                return Optional.of(ct.get());
-            } else {
-                Get g = new Get(CellUtil.cloneRow(cell));
-                byte[] family = CellUtil.cloneFamily(cell);
-                byte[] qualifier = CellUtils.addShadowCellSuffix(cell.getQualifierArray(),
-                        cell.getQualifierOffset(),
-                        cell.getQualifierLength());
-                g.addColumn(family, qualifier);
-                g.setTimeStamp(cell.getTimestamp());
-                Result r = hRegion.get(g);
-                if (r.containsColumn(family, qualifier)) {
-                    return Optional.of(new CommitTimestamp(SHADOW_CELL,
-                            Bytes.toLong(r.getValue(family, qualifier)), true));
-                }
-            }
-        } catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-            throw new IOException("Interrupted while getting commit timestamp from commit table");
-        } catch (ExecutionException e) {
-            throw new IOException("Error getting commit timestamp from commit table", e);
-        }
-
-        return Optional.absent();
-    }
-
-    private void retain(List<Cell> result, Cell cell, Optional<Cell> shadowCell) {
-        LOG.trace("Retaining cell {}", cell);
-        result.add(cell);
-        if (shadowCell.isPresent()) {
-            LOG.trace("...with shadow cell {}", cell, shadowCell.get());
-            result.add(shadowCell.get());
-        } else {
-            LOG.trace("...without shadow cell! (TS is above Low Watermark)");
-        }
-    }
-
-    private void retainLastTimestampedCellsSaved(List<Cell> result, Map<String, CellInfo> lastTimestampedCellsInRow) {
-        for (CellInfo cellInfo : lastTimestampedCellsInRow.values()) {
-            LOG.trace("Retaining last cell {} with shadow cell {}", cellInfo.getCell(), cellInfo.getShadowCell());
-            result.add(cellInfo.getCell());
-            result.add(cellInfo.getShadowCell());
-        }
-    }
-
-    private void skipToNextColumn(Cell cell, PeekingIterator<Map.Entry<Cell, Optional<Cell>>> iter) {
-        while (iter.hasNext()
-                && CellUtil.matchingFamily(iter.peek().getKey(), cell)
-                && CellUtil.matchingQualifier(iter.peek().getKey(), cell)) {
-            iter.next();
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-coprocessor/src/main/java/org/apache/omid/transaction/CompactorScanner.java
----------------------------------------------------------------------
diff --git a/hbase-coprocessor/src/main/java/org/apache/omid/transaction/CompactorScanner.java b/hbase-coprocessor/src/main/java/org/apache/omid/transaction/CompactorScanner.java
new file mode 100644
index 0000000..b2e5b62
--- /dev/null
+++ b/hbase-coprocessor/src/main/java/org/apache/omid/transaction/CompactorScanner.java
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.omid.transaction;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Optional;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.PeekingIterator;
+import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.omid.RegionWrapper;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
+import org.apache.omid.HBaseShims;
+import org.apache.omid.committable.CommitTable;
+import org.apache.omid.committable.CommitTable.Client;
+import org.apache.omid.committable.CommitTable.CommitTimestamp;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.SortedMap;
+import java.util.concurrent.ExecutionException;
+
+import static org.apache.omid.committable.CommitTable.CommitTimestamp.Location.SHADOW_CELL;
+
+public class CompactorScanner implements InternalScanner {
+    private static final Logger LOG = LoggerFactory.getLogger(CompactorScanner.class);
+    private final InternalScanner internalScanner;
+    private final CommitTable.Client commitTableClient;
+    private final Queue<CommitTable.Client> commitTableClientQueue;
+    private final boolean isMajorCompaction;
+    private final boolean retainNonTransactionallyDeletedCells;
+    private final long lowWatermark;
+
+    private final RegionWrapper regionWrapper;
+
+    private boolean hasMoreRows = false;
+    private List<Cell> currentRowWorthValues = new ArrayList<Cell>();
+
+    public CompactorScanner(ObserverContext<RegionCoprocessorEnvironment> e,
+                            InternalScanner internalScanner,
+                            Client commitTableClient,
+                            Queue<CommitTable.Client> commitTableClientQueue,
+                            boolean isMajorCompaction,
+                            boolean preserveNonTransactionallyDeletedCells) throws IOException {
+        this.internalScanner = internalScanner;
+        this.commitTableClient = commitTableClient;
+        this.commitTableClientQueue = commitTableClientQueue;
+        this.isMajorCompaction = isMajorCompaction;
+        this.retainNonTransactionallyDeletedCells = preserveNonTransactionallyDeletedCells;
+        this.lowWatermark = getLowWatermarkFromCommitTable();
+        // Obtain the table in which the scanner is going to operate
+        this.regionWrapper = HBaseShims.getRegionCoprocessorRegion(e.getEnvironment());
+        LOG.info("Scanner cleaning up uncommitted txs older than LW [{}] in region [{}]",
+                 lowWatermark, regionWrapper.getRegionInfo());
+    }
+
+    @Override
+    public boolean next(List<Cell> results) throws IOException {
+        return next(results, -1);
+    }
+
+    // This method is invoked only in HBase 1.x versions
+    public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
+        int limit = HBaseShims.getBatchLimit(scannerContext);
+        return next(result, limit);
+    }
+
+    public boolean next(List<Cell> result, int limit) throws IOException {
+
+        if (currentRowWorthValues.isEmpty()) {
+            // 1) Read next row
+            List<Cell> scanResult = new ArrayList<Cell>();
+            hasMoreRows = internalScanner.next(scanResult);
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("Row: Result {} limit {} more rows? {}", scanResult, limit, hasMoreRows);
+            }
+            // 2) Traverse result list separating normal cells from shadow
+            // cells and building a map to easily access the shadow cells.
+            SortedMap<Cell, Optional<Cell>> cellToSc = CellUtils.mapCellsToShadowCells(scanResult);
+
+            // 3) traverse the list of row key values isolated before and
+            // check which ones should be discarded
+            Map<String, CellInfo> lastTimestampedCellsInRow = new HashMap<>();
+            PeekingIterator<Map.Entry<Cell, Optional<Cell>>> iter
+                    = Iterators.peekingIterator(cellToSc.entrySet().iterator());
+            while (iter.hasNext()) {
+                Map.Entry<Cell, Optional<Cell>> entry = iter.next();
+                Cell cell = entry.getKey();
+                Optional<Cell> shadowCellOp = entry.getValue();
+
+                if (cell.getTimestamp() > lowWatermark) {
+                    retain(currentRowWorthValues, cell, shadowCellOp);
+                    continue;
+                }
+
+                if (shouldRetainNonTransactionallyDeletedCell(cell)) {
+                    retain(currentRowWorthValues, cell, shadowCellOp);
+                    continue;
+                }
+
+                // During a minor compaction the coprocessor may only see a
+                // subset of store files and may not have all the versions
+                // of a cell available for consideration. Therefore, if it
+                // deletes a cell with a tombstone during a minor compaction,
+                // an older version of the cell may become visible again. So,
+                // we have to remove tombstones only in major compactions.
+                if (isMajorCompaction) {
+                    if (CellUtils.isTombstone(cell)) {
+                        if (shadowCellOp.isPresent()) {
+                            skipToNextColumn(cell, iter);
+                        } else {
+                            Optional<CommitTimestamp> commitTimestamp = queryCommitTimestamp(cell);
+                            // Clean the cell only if it is valid
+                            if (commitTimestamp.isPresent() && commitTimestamp.get().isValid()) {
+                                skipToNextColumn(cell, iter);
+                            }
+                        }
+                        continue;
+                    }
+                }
+
+                if (shadowCellOp.isPresent()) {
+                    saveLastTimestampedCell(lastTimestampedCellsInRow, cell, shadowCellOp.get());
+                } else {
+                    Optional<CommitTimestamp> commitTimestamp = queryCommitTimestamp(cell);
+                    if (commitTimestamp.isPresent() && commitTimestamp.get().isValid()) {
+                        // Build the missing shadow cell...
+                        byte[] shadowCellValue = Bytes.toBytes(commitTimestamp.get().getValue());
+                        Cell shadowCell = CellUtils.buildShadowCellFromCell(cell, shadowCellValue);
+                        saveLastTimestampedCell(lastTimestampedCellsInRow, cell, shadowCell);
+                    } else {
+                        LOG.trace("Discarding cell {}", cell);
+                    }
+                }
+            }
+            retainLastTimestampedCellsSaved(currentRowWorthValues, lastTimestampedCellsInRow);
+
+            // 4) Sort the list
+            Collections.sort(currentRowWorthValues, KeyValue.COMPARATOR);
+        }
+
+        // Chomp current row worth values up to the limit
+        if (currentRowWorthValues.size() <= limit) {
+            result.addAll(currentRowWorthValues);
+            currentRowWorthValues.clear();
+        } else {
+            result.addAll(currentRowWorthValues.subList(0, limit));
+            currentRowWorthValues.subList(0, limit).clear();
+        }
+        LOG.trace("Results to preserve {}", result);
+
+        return hasMoreRows;
+    }
+
+    @Override
+    public void close() throws IOException {
+        internalScanner.close();
+        commitTableClientQueue.add(commitTableClient);
+    }
+
+    // ----------------------------------------------------------------------------------------------------------------
+    // Helper methods
+    // ----------------------------------------------------------------------------------------------------------------
+
+    @VisibleForTesting
+    public boolean shouldRetainNonTransactionallyDeletedCell(Cell cell) {
+        return (CellUtil.isDelete(cell) || CellUtil.isDeleteFamily(cell))
+                &&
+                retainNonTransactionallyDeletedCells;
+    }
+
+    private void saveLastTimestampedCell(Map<String, CellInfo> lastCells, Cell cell, Cell shadowCell) {
+        String cellKey = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength())
+                + ":"
+                + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
+        LOG.trace("Cell Key: {}", cellKey);
+
+        if (!lastCells.containsKey(cellKey)) {
+            lastCells.put(cellKey, new CellInfo(cell, shadowCell));
+        } else {
+            if (lastCells.get(cellKey).getTimestamp() < cell.getTimestamp()) {
+                lastCells.put(cellKey, new CellInfo(cell, shadowCell));
+            } else {
+                LOG.trace("Forgetting old cell {}", cell);
+            }
+        }
+    }
+
+    private long getLowWatermarkFromCommitTable() throws IOException {
+        try {
+            LOG.trace("About to read log watermark from commit table");
+            return commitTableClient.readLowWatermark().get();
+        } catch (InterruptedException ie) {
+            Thread.currentThread().interrupt();
+            LOG.warn("Interrupted getting low watermark from commit table", ie);
+            throw new IOException("Interrupted getting low watermark from commit table");
+        } catch (ExecutionException ee) {
+            LOG.warn("Problem getting low watermark from commit table");
+            throw new IOException("Problem getting low watermark from commit table", ee.getCause());
+        }
+    }
+
+    private Optional<CommitTimestamp> queryCommitTimestamp(Cell cell) throws IOException {
+        try {
+            Optional<CommitTimestamp> ct = commitTableClient.getCommitTimestamp(cell.getTimestamp()).get();
+            if (ct.isPresent()) {
+                return Optional.of(ct.get());
+            } else {
+                Get g = new Get(CellUtil.cloneRow(cell));
+                byte[] family = CellUtil.cloneFamily(cell);
+                byte[] qualifier = CellUtils.addShadowCellSuffix(cell.getQualifierArray(),
+                        cell.getQualifierOffset(),
+                        cell.getQualifierLength());
+                g.addColumn(family, qualifier);
+                g.setTimeStamp(cell.getTimestamp());
+                Result r = regionWrapper.get(g);
+                if (r.containsColumn(family, qualifier)) {
+                    return Optional.of(new CommitTimestamp(SHADOW_CELL,
+                            Bytes.toLong(r.getValue(family, qualifier)), true));
+                }
+            }
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new IOException("Interrupted while getting commit timestamp from commit table");
+        } catch (ExecutionException e) {
+            throw new IOException("Error getting commit timestamp from commit table", e);
+        }
+
+        return Optional.absent();
+    }
+
+    private void retain(List<Cell> result, Cell cell, Optional<Cell> shadowCell) {
+        LOG.trace("Retaining cell {}", cell);
+        result.add(cell);
+        if (shadowCell.isPresent()) {
+            LOG.trace("...with shadow cell {}", cell, shadowCell.get());
+            result.add(shadowCell.get());
+        } else {
+            LOG.trace("...without shadow cell! (TS is above Low Watermark)");
+        }
+    }
+
+    private void retainLastTimestampedCellsSaved(List<Cell> result, Map<String, CellInfo> lastTimestampedCellsInRow) {
+        for (CellInfo cellInfo : lastTimestampedCellsInRow.values()) {
+            LOG.trace("Retaining last cell {} with shadow cell {}", cellInfo.getCell(), cellInfo.getShadowCell());
+            result.add(cellInfo.getCell());
+            result.add(cellInfo.getShadowCell());
+        }
+    }
+
+    private void skipToNextColumn(Cell cell, PeekingIterator<Map.Entry<Cell, Optional<Cell>>> iter) {
+        while (iter.hasNext()
+                && CellUtil.matchingFamily(iter.peek().getKey(), cell)
+                && CellUtil.matchingQualifier(iter.peek().getKey(), cell)) {
+            iter.next();
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-coprocessor/src/main/java/org/apache/omid/transaction/OmidCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-coprocessor/src/main/java/org/apache/omid/transaction/OmidCompactor.java b/hbase-coprocessor/src/main/java/org/apache/omid/transaction/OmidCompactor.java
index 887a2f6..05e6608 100644
--- a/hbase-coprocessor/src/main/java/org/apache/omid/transaction/OmidCompactor.java
+++ b/hbase-coprocessor/src/main/java/org/apache/omid/transaction/OmidCompactor.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.regionserver.CompactorScanner;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.Store;
@@ -52,8 +51,8 @@ public class OmidCompactor extends BaseRegionObserver {
 
     private static final Logger LOG = LoggerFactory.getLogger(OmidCompactor.class);
 
-    private static final String HBASE_RETAIN_NON_TRANSACTIONALLY_DELETED_CELLS_KEY
-            = "omid.hbase.compactor.retain.tombstones";
+    private static final String HBASE_RETAIN_NON_TRANSACTIONALLY_DELETED_CELLS_KEY =
+            "omid.hbase.compactor.retain.tombstones";
     private static final boolean HBASE_RETAIN_NON_TRANSACTIONALLY_DELETED_CELLS_DEFAULT = true;
 
     final static String OMID_COMPACTABLE_CF_FLAG = "OMID_ENABLED";
@@ -85,7 +84,7 @@ public class OmidCompactor extends BaseRegionObserver {
         }
         retainNonTransactionallyDeletedCells =
                 conf.getBoolean(HBASE_RETAIN_NON_TRANSACTIONALLY_DELETED_CELLS_KEY,
-                        HBASE_RETAIN_NON_TRANSACTIONALLY_DELETED_CELLS_DEFAULT);
+                                HBASE_RETAIN_NON_TRANSACTIONALLY_DELETED_CELLS_DEFAULT);
         LOG.info("Compactor coprocessor started");
     }
 
@@ -106,12 +105,11 @@ public class OmidCompactor extends BaseRegionObserver {
                                       InternalScanner scanner,
                                       ScanType scanType,
                                       CompactionRequest request) throws IOException {
+
         HTableDescriptor desc = e.getEnvironment().getRegion().getTableDesc();
-        HColumnDescriptor famDesc
-                = desc.getFamily(Bytes.toBytes(store.getColumnFamilyName()));
+        HColumnDescriptor famDesc = desc.getFamily(Bytes.toBytes(store.getColumnFamilyName()));
         boolean omidCompactable = Boolean.valueOf(famDesc.getValue(OMID_COMPACTABLE_CF_FLAG));
-        // only column families tagged as compactable are compacted
-        // with omid compactor
+        // only column families tagged as compactable are compacted with omid compactor
         if (!omidCompactable) {
             return scanner;
         } else {
@@ -121,12 +119,13 @@ public class OmidCompactor extends BaseRegionObserver {
             }
             boolean isMajorCompaction = request.isMajor();
             return new CompactorScanner(e,
-                    scanner,
-                    commitTableClient,
-                    commitTableClientQueue,
-                    isMajorCompaction,
-                    retainNonTransactionallyDeletedCells);
+                                        scanner,
+                                        commitTableClient,
+                                        commitTableClientQueue,
+                                        isMajorCompaction,
+                                        retainNonTransactionallyDeletedCells);
         }
+
     }
 
     private CommitTable.Client initAndGetCommitTableClient() throws IOException {

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-coprocessor/src/test/java/org/apache/omid/transaction/TestCompactorScanner.java
----------------------------------------------------------------------
diff --git a/hbase-coprocessor/src/test/java/org/apache/omid/transaction/TestCompactorScanner.java b/hbase-coprocessor/src/test/java/org/apache/omid/transaction/TestCompactorScanner.java
index 8a217b3..e416849 100644
--- a/hbase-coprocessor/src/test/java/org/apache/omid/transaction/TestCompactorScanner.java
+++ b/hbase-coprocessor/src/test/java/org/apache/omid/transaction/TestCompactorScanner.java
@@ -23,7 +23,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.regionserver.CompactorScanner;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.util.Bytes;

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-shims/hbase-0/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java
----------------------------------------------------------------------
diff --git a/hbase-shims/hbase-0/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java b/hbase-shims/hbase-0/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java
deleted file mode 100644
index 23742b6..0000000
--- a/hbase-shims/hbase-0/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.regionserver;
-
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Result;
-
-import java.io.IOException;
-
-public class Region {
-
-    HRegion hRegion;
-
-    public Region(HRegion hRegion) {
-
-        this.hRegion = hRegion;
-
-    }
-
-    Result get(Get getOperation) throws IOException {
-
-        return hRegion.get(getOperation);
-
-    }
-
-    HRegionInfo getRegionInfo() {
-
-        return hRegion.getRegionInfo();
-
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-shims/hbase-0/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
----------------------------------------------------------------------
diff --git a/hbase-shims/hbase-0/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java b/hbase-shims/hbase-0/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
index 56990f6..52de47a 100644
--- a/hbase-shims/hbase-0/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
+++ b/hbase-shims/hbase-0/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+// IMPORTANT NOTE: This class exists only for compile-time compatibility with HBase 1.x. Its methods
+// shouldn't be invoked; doing so will result in an error.
 public class ScannerContext {
 
     int getBatchLimit() {

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-shims/hbase-0/src/main/java/org/apache/omid/HBaseShims.java
----------------------------------------------------------------------
diff --git a/hbase-shims/hbase-0/src/main/java/org/apache/omid/HBaseShims.java b/hbase-shims/hbase-0/src/main/java/org/apache/omid/HBaseShims.java
index e66afca..cbbb815 100644
--- a/hbase-shims/hbase-0/src/main/java/org/apache/omid/HBaseShims.java
+++ b/hbase-shims/hbase-0/src/main/java/org/apache/omid/HBaseShims.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
 
 import java.io.IOException;
 
@@ -36,9 +36,9 @@ public class HBaseShims {
 
     }
 
-    static public Region getRegionCoprocessorRegion(RegionCoprocessorEnvironment env) {
+    static public RegionWrapper getRegionCoprocessorRegion(RegionCoprocessorEnvironment env) {
 
-        return new Region(env.getRegion());
+        return new RegionWrapper(env.getRegion());
 
     }
 
@@ -56,4 +56,10 @@ public class HBaseShims {
 
     }
 
+    public static int getBatchLimit(ScannerContext scannerContext) {
+
+        throw new UnsupportedOperationException("Should not be called for 0.9x versions");
+
+    }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-shims/hbase-0/src/main/java/org/apache/omid/RegionWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-shims/hbase-0/src/main/java/org/apache/omid/RegionWrapper.java b/hbase-shims/hbase-0/src/main/java/org/apache/omid/RegionWrapper.java
new file mode 100644
index 0000000..2fc19aa
--- /dev/null
+++ b/hbase-shims/hbase-0/src/main/java/org/apache/omid/RegionWrapper.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.omid;
+
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+
+import java.io.IOException;
+
+/**
+ * Wrapper over the {@link org.apache.hadoop.hbase.regionserver.HRegion} class in HBase 0.9x versions
+ */
+public class RegionWrapper {
+
+    HRegion hRegion;
+
+    public RegionWrapper(HRegion hRegion) {
+
+        this.hRegion = hRegion;
+
+    }
+
+    public Result get(Get getOperation) throws IOException {
+
+        return hRegion.get(getOperation);
+
+    }
+
+    public HRegionInfo getRegionInfo() {
+
+        return hRegion.getRegionInfo();
+
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-shims/hbase-1/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-shims/hbase-1/pom.xml b/hbase-shims/hbase-1/pom.xml
index bf141c4..1c412cc 100644
--- a/hbase-shims/hbase-1/pom.xml
+++ b/hbase-shims/hbase-1/pom.xml
@@ -16,4 +16,18 @@
         <hbase.version>${hbase1.version}</hbase.version>
     </properties>
 
+    <dependencies>
+
+        <!-- Dependencies on Omid modules -->
+
+        <dependency>
+            <groupId>org.apache.omid</groupId>
+            <artifactId>omid-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <!-- End of Dependencies on Omid modules -->
+
+    </dependencies>
+
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-shims/hbase-1/src/main/java/org/apache/omid/HBaseShims.java
----------------------------------------------------------------------
diff --git a/hbase-shims/hbase-1/src/main/java/org/apache/omid/HBaseShims.java b/hbase-shims/hbase-1/src/main/java/org/apache/omid/HBaseShims.java
index 636e892..86d97cc 100644
--- a/hbase-shims/hbase-1/src/main/java/org/apache/omid/HBaseShims.java
+++ b/hbase-shims/hbase-1/src/main/java/org/apache/omid/HBaseShims.java
@@ -24,8 +24,10 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
 
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 
 public class HBaseShims {
 
@@ -35,9 +37,9 @@ public class HBaseShims {
 
     }
 
-    static public Region getRegionCoprocessorRegion(RegionCoprocessorEnvironment env) {
+    static public RegionWrapper getRegionCoprocessorRegion(RegionCoprocessorEnvironment env) {
 
-        return env.getRegion();
+        return new RegionWrapper(env.getRegion());
 
     }
 
@@ -55,4 +57,19 @@ public class HBaseShims {
 
     }
 
+    static public int getBatchLimit(ScannerContext scannerContext) throws IOException {
+
+        // Invoke scannerContext.getBatchLimit() through reflection, as it is not accessible in HBase 1.x versions
+        try {
+            return (int) ReflectionHelper.invokeParameterlessMethod(scannerContext, "getBatchLimit");
+        } catch (NoSuchMethodException e) {
+            throw new IOException("Can't find getBatchLimit method in ScannerContext through reflection", e);
+        } catch (IllegalAccessException e) {
+            throw new IOException("Can't access getBatchLimit method in ScannerContext through reflection", e);
+        } catch (InvocationTargetException e) {
+            throw new IOException("Exception thrown in calling getBatchLimit method through reflection", e);
+        }
+
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/hbase-shims/hbase-1/src/main/java/org/apache/omid/RegionWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-shims/hbase-1/src/main/java/org/apache/omid/RegionWrapper.java b/hbase-shims/hbase-1/src/main/java/org/apache/omid/RegionWrapper.java
new file mode 100644
index 0000000..b589ec1
--- /dev/null
+++ b/hbase-shims/hbase-1/src/main/java/org/apache/omid/RegionWrapper.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.omid;
+
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.regionserver.Region;
+
+import java.io.IOException;
+
+/**
+ * Wrapper over {@link org.apache.hadoop.hbase.regionserver.Region} interface in HBase 1.x versions
+ */
+public class RegionWrapper {
+
+    Region region;
+
+    public RegionWrapper(Region region) {
+
+        this.region = region;
+
+    }
+
+    public Result get(Get getOperation) throws IOException {
+
+        return region.get(getOperation);
+
+    }
+
+    public HRegionInfo getRegionInfo() {
+
+        return region.getRegionInfo();
+
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-omid/blob/0b5a7b59/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index dae90e7..f61dcc8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -135,7 +135,7 @@
 
         <!-- 3rd-Party Library Versioning -->
         <hbase0.version>0.98.10.1-hadoop1</hbase0.version>
-        <hbase1.version>1.1.1</hbase1.version>
+        <hbase1.version>1.3.1</hbase1.version>
         <guava.version>14.0.1</guava.version>
         <guice.version>3.0</guice.version>
         <testng.version>6.8.8</testng.version>