Posted to commits@phoenix.apache.org by ja...@apache.org on 2018/09/28 16:53:03 UTC

phoenix git commit: Upgrade to Tephra 0.15 and use Table instead of HTableInterface

Repository: phoenix
Updated Branches:
  refs/heads/omid2 790064b54 -> 862929248


Upgrade to Tephra 0.15 and use Table instead of HTableInterface


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/86292924
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/86292924
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/86292924

Branch: refs/heads/omid2
Commit: 8629292483ac24b53539370654d2541c0d504486
Parents: 790064b
Author: James Taylor <ja...@apache.org>
Authored: Fri Sep 28 09:52:52 2018 -0700
Committer: James Taylor <ja...@apache.org>
Committed: Fri Sep 28 09:52:52 2018 -0700

----------------------------------------------------------------------
 .../phoenix/tx/FlappingTransactionIT.java       |   3 +-
 .../apache/phoenix/execute/DelegateHTable.java  |  57 +---
 .../apache/phoenix/execute/MutationState.java   |  12 +-
 .../PhoenixTxIndexMutationGenerator.java        |   3 +-
 .../phoenix/iterate/TableResultIterator.java    |   6 +-
 .../transaction/OmidTransactionContext.java     |   8 +-
 .../transaction/OmidTransactionTable.java       | 261 ++++---------------
 .../transaction/PhoenixTransactionContext.java  |  10 +-
 .../transaction/TephraTransactionContext.java   |   7 +-
 .../java/org/apache/phoenix/util/TestUtil.java  |   3 +-
 pom.xml                                         |   2 +-
 11 files changed, 79 insertions(+), 293 deletions(-)
----------------------------------------------------------------------


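For context, a minimal standalone sketch (not part of this commit) of the HTableInterface -> Table migration this patch applies throughout phoenix-core. Table is the non-deprecated HBase client interface handed out by Connection and supports the same get/put/batch calls; the table name "MY_TABLE" and the row/column values below are placeholders used only for illustration.

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TableMigrationSketch {
        public static void main(String[] args) throws Exception {
            try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
                 // Previously Phoenix passed HTableInterface around; Table is its supertype
                 // and is what Connection.getTable() returns.
                 Table table = conn.getTable(TableName.valueOf("MY_TABLE"))) {
                Put put = new Put(Bytes.toBytes("z"));
                put.addColumn(Bytes.toBytes("0"), Bytes.toBytes("q"), Bytes.toBytes("v"));
                table.put(put);
                Result r = table.get(new Get(Bytes.toBytes("z")));
                System.out.println("exists=" + !r.isEmpty());
                // Note: Table has no getTableName(); callers use table.getName().getName()
                // instead, which is the change made in TableResultIterator below.
            }
        }
    }
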
http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/it/java/org/apache/phoenix/tx/FlappingTransactionIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/tx/FlappingTransactionIT.java b/phoenix-core/src/it/java/org/apache/phoenix/tx/FlappingTransactionIT.java
index 3c164ea..5ba8dd9 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/tx/FlappingTransactionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/tx/FlappingTransactionIT.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.end2end.ParallelStatsDisabledIT;
 import org.apache.phoenix.exception.SQLExceptionCode;
@@ -252,7 +253,7 @@ public class FlappingTransactionIT extends ParallelStatsDisabledIT {
         // Either set txn on all existing OmidTransactionTable or throw exception
         // when attempting to get OmidTransactionTable if a txn is not in progress.
         txContext.begin(); 
-        HTableInterface txTable = txContext.getTransactionalTable(htable, false);
+        Table txTable = txContext.getTransactionalTable(htable, false);
 
         // Use HBase APIs to add a new row
         Put put = new Put(Bytes.toBytes("z"));

http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
index f45b356..0618945 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -36,6 +35,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -46,19 +46,14 @@ import com.google.protobuf.Message;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
 
-public class DelegateHTable implements HTableInterface {
-    protected final HTableInterface delegate;
+public class DelegateHTable implements Table {
+    protected final Table delegate;
 
-    public DelegateHTable(HTableInterface delegate) {
+    public DelegateHTable(Table delegate) {
         this.delegate = delegate;
     }
 
     @Override
-    public byte[] getTableName() {
-        return delegate.getTableName();
-    }
-
-    @Override
     public TableName getName() {
         return delegate.getName();
     }
@@ -79,11 +74,6 @@ public class DelegateHTable implements HTableInterface {
     }
 
     @Override
-    public Boolean[] exists(List<Get> gets) throws IOException {
-        return delegate.exists(gets);
-    }
-
-    @Override
     public void batch(List<? extends Row> actions, Object[] results) throws IOException, InterruptedException {
         delegate.batch(actions, results);
     }
@@ -117,12 +107,6 @@ public class DelegateHTable implements HTableInterface {
         return delegate.get(gets);
     }
 
-    @SuppressWarnings("deprecation")
-    @Override
-    public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
-        return delegate.getRowOrBefore(row, family);
-    }
-
     @Override
     public ResultScanner getScanner(Scan scan) throws IOException {
         return delegate.getScanner(scan);
@@ -195,23 +179,6 @@ public class DelegateHTable implements HTableInterface {
         return delegate.incrementColumnValue(row, family, qualifier, amount, durability);
     }
 
-    @SuppressWarnings("deprecation")
-    @Override
-    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL)
-            throws IOException {
-        return delegate.incrementColumnValue(row, family, qualifier, amount, writeToWAL);
-    }
-
-    @Override
-    public boolean isAutoFlush() {
-        return delegate.isAutoFlush();
-    }
-
-    @Override
-    public void flushCommits() throws IOException {
-        delegate.flushCommits();
-    }
-
     @Override
     public void close() throws IOException {
         delegate.close();
@@ -234,22 +201,6 @@ public class DelegateHTable implements HTableInterface {
         delegate.coprocessorService(service, startKey, endKey, callable, callback);
     }
 
-    @SuppressWarnings("deprecation")
-    @Override
-    public void setAutoFlush(boolean autoFlush) {
-        delegate.setAutoFlush(autoFlush);
-    }
-
-    @Override
-    public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
-        delegate.setAutoFlush(autoFlush, clearBufferOnFail);
-    }
-
-    @Override
-    public void setAutoFlushTo(boolean autoFlush) {
-        delegate.setAutoFlushTo(autoFlush);
-    }
-
     @Override
     public long getWriteBufferSize() {
         return delegate.getWriteBufferSize();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index 3d13239..ee5a9c5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
@@ -39,8 +39,8 @@ import javax.annotation.Nonnull;
 import javax.annotation.concurrent.Immutable;
 
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
@@ -292,8 +292,8 @@ public class MutationState implements SQLCloseable {
     // the Transaction outside of MutationState, this seems reasonable, as the member variables
     // would not change as these threads are running. We also clone mutationState to ensure that
     // the transaction context won't change due to a commit when auto commit is true.
-    public HTableInterface getHTable(PTable table) throws SQLException {
-        HTableInterface htable = this.getConnection().getQueryServices().getTable(table.getPhysicalName().getBytes());
+    public Table getHTable(PTable table) throws SQLException {
+        Table htable = this.getConnection().getQueryServices().getTable(table.getPhysicalName().getBytes());
         if (table.isTransactional() && phoenixTransactionContext.isTransactionRunning()) {
             // We're only using this table for reading, so we want it wrapped even if it's an index
             htable = phoenixTransactionContext.getTransactionalTable(htable, table.isImmutableRows() || table.getType() == PTableType.INDEX);
@@ -532,7 +532,7 @@ public class MutationState implements SQLCloseable {
                             if (indexMutationsMap == null) {
                                 PhoenixTxIndexMutationGenerator generator = PhoenixTxIndexMutationGenerator.newGenerator(connection, table,
                                         indexList, mutationsPertainingToIndex.get(0).getAttributesMap());
-                                try (HTableInterface htable = connection.getQueryServices().getTable(
+                                try (Table htable = connection.getQueryServices().getTable(
                                         table.getPhysicalName().getBytes())) {
                                     Collection<Pair<Mutation, byte[]>> allMutations = generator.getIndexUpdates(htable,
                                             mutationsPertainingToIndex.iterator());
@@ -958,7 +958,7 @@ public class MutationState implements SQLCloseable {
                     // region servers.
                     shouldRetry = cache != null;
                     SQLException sqlE = null;
-                    HTableInterface hTable = connection.getQueryServices().getTable(htableName);
+                    Table hTable = connection.getQueryServices().getTable(htableName);
                     try {
                         if (table.isTransactional()) {
                             // Track tables to which we've sent uncommitted data
@@ -979,7 +979,7 @@ public class MutationState implements SQLCloseable {
                         for (final List<Mutation> mutationBatch : mutationBatchList) {
                             if (shouldRetryIndexedMutation) {
                                 // if there was an index write failure, retry the mutation in a loop
-                                final HTableInterface finalHTable = hTable;
+                                final Table finalHTable = hTable;
                                 PhoenixIndexFailurePolicy.doBatchWithRetries(new MutateCommand() {
                                     @Override
                                     public void doMutation() throws IOException {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java
index 1737911..8a94314 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -113,7 +112,7 @@ public class PhoenixTxIndexMutationGenerator {
         stored.addAll(m);
     }
 
-    public Collection<Pair<Mutation, byte[]>> getIndexUpdates(HTableInterface htable, Iterator<? extends Mutation> mutationIterator) throws IOException, SQLException {
+    public Collection<Pair<Mutation, byte[]>> getIndexUpdates(Table htable, Iterator<? extends Mutation> mutationIterator) throws IOException, SQLException {
 
         if (!mutationIterator.hasNext()) {
             return Collections.emptyList();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableResultIterator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableResultIterator.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableResultIterator.java
index f1d1663..2bc9e39 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableResultIterator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableResultIterator.java
@@ -37,8 +37,8 @@ import java.util.concurrent.locks.ReentrantLock;
 import javax.annotation.concurrent.GuardedBy;
 
 import org.apache.hadoop.hbase.client.AbstractClientScanner;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.cache.ServerCacheClient.ServerCache;
@@ -74,7 +74,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class TableResultIterator implements ResultIterator {
     private final Scan scan;
-    private final HTableInterface htable;
+    private final Table htable;
     private final ScanMetricsHolder scanMetricsHolder;
     private static final ResultIterator UNINITIALIZED_SCANNER = ResultIterator.EMPTY_ITERATOR;
     private final long renewLeaseThreshold;
@@ -187,7 +187,7 @@ public class TableResultIterator implements ResultIterator {
                                 newScan.setStartRow(ByteUtil.nextKey(startRowSuffix));
                             }
                         }
-                        plan.getContext().getConnection().getQueryServices().clearTableRegionCache(htable.getTableName());
+                        plan.getContext().getConnection().getQueryServices().clearTableRegionCache(htable.getName().getName());
                         if (e1 instanceof HashJoinCacheNotFoundException) {
                             logger.debug(
                                     "Retrying when Hash Join cache is not found on the server ,by sending the cache again");

http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionContext.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionContext.java b/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionContext.java
index cae6a84..42aeb08 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionContext.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionContext.java
@@ -22,7 +22,7 @@ import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.omid.proto.TSOProto;
 import org.apache.omid.transaction.AbstractTransaction.VisibilityLevel;
 import org.apache.omid.transaction.HBaseCellId;
@@ -299,8 +299,6 @@ public class OmidTransactionContext implements PhoenixTransactionContext {
 
     @Override
     public void markDMLFence(PTable dataTable) {
-        // TODO Auto-generated method stub
-        
     }
 
     /**
@@ -313,12 +311,12 @@ public class OmidTransactionContext implements PhoenixTransactionContext {
 
 
     @Override
-    public HTableInterface getTransactionalTable(HTableInterface htable, boolean isConflictFree) throws SQLException {
+    public Table getTransactionalTable(Table htable, boolean isConflictFree) throws SQLException {
         return new OmidTransactionTable(this, htable, isConflictFree);
     }
 
     @Override
-    public HTableInterface getTransactionalTableWriter(PhoenixConnection connection, PTable table, HTableInterface htable, boolean isIndex) throws SQLException {
+    public Table getTransactionalTableWriter(PhoenixConnection connection, PTable table, Table htable, boolean isIndex) throws SQLException {
         return new OmidTransactionTable(this, htable, table.isImmutableRows() || isIndex);
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java b/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
index 2088622..70625d2 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
@@ -19,7 +19,7 @@ package org.apache.phoenix.transaction;
 
 import java.io.IOException;
 import java.sql.SQLException;
-import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
@@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -38,11 +37,11 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.hdfs.server.namenode.UnsupportedActionException;
 import org.apache.omid.transaction.TTable;
 import org.apache.omid.transaction.Transaction;
 import org.apache.phoenix.exception.SQLExceptionCode;
@@ -53,23 +52,23 @@ import com.google.protobuf.Message;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
 
-public class OmidTransactionTable implements HTableInterface {
+public class OmidTransactionTable implements Table {
+    // Copied from HBase ProtobufUtil since it's not accessible
+    final static Result EMPTY_RESULT_EXISTS_TRUE = Result.create(null, true);
 
     private TTable tTable;
     private Transaction tx;
-    private boolean conflictFree;
 
     public OmidTransactionTable() throws SQLException {
         this.tTable = null;
         this.tx = null;
-        this.conflictFree = false;
     }
 
-    public OmidTransactionTable(PhoenixTransactionContext ctx, HTableInterface hTable) throws SQLException {
+    public OmidTransactionTable(PhoenixTransactionContext ctx, Table hTable) throws SQLException {
         this(ctx, hTable, false);
     }
 
-    public OmidTransactionTable(PhoenixTransactionContext ctx, HTableInterface hTable, boolean isImmutable) throws SQLException  {
+    public OmidTransactionTable(PhoenixTransactionContext ctx, Table hTable, boolean isImmutable) throws SQLException  {
         assert(ctx instanceof OmidTransactionContext);
 
         OmidTransactionContext omidTransactionContext = (OmidTransactionContext) ctx;
@@ -84,12 +83,6 @@ public class OmidTransactionTable implements HTableInterface {
         }
 
         this.tx = omidTransactionContext.getTransaction();
-
-//        if (pTable != null && pTable.getType() != PTableType.INDEX) {
-//            omidTransactionContext.markDMLFence(pTable);
-//        }
-
-        this.conflictFree = isImmutable;
     }
 
     @Override
@@ -114,11 +107,6 @@ public class OmidTransactionTable implements HTableInterface {
     }
 
     @Override
-    public byte[] getTableName() {
-        return tTable.getTableName();
-    }
-
-    @Override
     public Configuration getConfiguration() {
         return tTable.getConfiguration();
     }
@@ -151,7 +139,7 @@ public class OmidTransactionTable implements HTableInterface {
 
     @Override
     public void put(List<Put> puts) throws IOException {
-        throw new UnsupportedActionException("Function put(List<Put>) is not supported");
+        tTable.put(tx, puts);
     }
 
     @Override
@@ -160,231 +148,119 @@ public class OmidTransactionTable implements HTableInterface {
     }
 
     @Override
-    public void setAutoFlush(boolean autoFlush) {
-        tTable.setAutoFlush(autoFlush);
-    }
-
-    @Override
-    public boolean isAutoFlush() {
-        return tTable.isAutoFlush();
-    }
-
-    @Override
-    public long getWriteBufferSize() {
-        return tTable.getWriteBufferSize();
-    }
-
-    @Override
-    public void setWriteBufferSize(long writeBufferSize) throws IOException {
-        tTable.setWriteBufferSize(writeBufferSize);
-    }
-
-    @Override
-    public void flushCommits() throws IOException {
-        tTable.flushCommits();
-    }
-
-    @Override
     public void close() throws IOException {
         tTable.close();
     }
 
     @Override
-    public long incrementColumnValue(byte[] row, byte[] family,
-            byte[] qualifier, long amount, boolean writeToWAL)
-            throws IOException {
-        // TODO Auto-generated method stub
-        return 0;
-    }
-
-    @Override
-    public Boolean[] exists(List<Get> gets) throws IOException {
-            // TODO Auto-generated method stub
-            return null;
-    }
-
-    @Override
-    public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
-        // TODO Auto-generated method stub
-    }
-
-    @Override
-    public void setAutoFlushTo(boolean autoFlush) {
-        tTable.setAutoFlush(autoFlush);
-    }
-
-    @Override
-    public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
-        throw new UnsupportedActionException("Function getRowOrBefore is not supported");
-//        return null;
-    }
-
-    @Override
     public TableName getName() {
-        assert(false);
-        // TODO Auto-generated method stub
-        return null;
+        byte[] name = tTable.getTableName();
+        return TableName.valueOf(name);
     }
 
     @Override
     public boolean[] existsAll(List<Get> gets) throws IOException {
-        throw new UnsupportedActionException("Function existsAll is not supported");
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public void batch(List<? extends Row> actions, Object[] results)
             throws IOException, InterruptedException {
-        assert(false);
-
-        // TODO Auto-generated method stub
+        tTable.batch(tx, actions);
+        Arrays.fill(results, EMPTY_RESULT_EXISTS_TRUE);
     }
 
     @Override
     public Object[] batch(List<? extends Row> actions) throws IOException,
             InterruptedException {
-        List<Put> putList = new ArrayList<Put>();
-
-       for (Row row : actions) {
-           if (row instanceof Put) {
-               Put put = (Put) row;
-               if (conflictFree) {
-                   tTable.markPutAsConflictFreeMutation(put);
-               }
-               putList.add(put);
-           } else {
-               // TODO implement delete batch
-               assert (row instanceof Delete);
-               this.delete((Delete) row);
-           }
-       }
-
-       tTable.put(tx, putList);
-
-       return null;
+        Object[] results;
+        batch(actions, results = new Object[actions.size()]);
+        return results;
     }
 
     @Override
     public <R> void batchCallback(List<? extends Row> actions,
             Object[] results, Callback<R> callback) throws IOException,
             InterruptedException {
-        assert(false);
-
-        // TODO Auto-generated method stub
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public <R> Object[] batchCallback(List<? extends Row> actions,
             Callback<R> callback) throws IOException, InterruptedException {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return null;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
             byte[] value, Put put) throws IOException {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return false;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
             CompareOp compareOp, byte[] value, Put put) throws IOException {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return false;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
             byte[] value, Delete delete) throws IOException {
-        // TODO Auto-generated method stub
-        assert(false);
-
-        return false;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
             CompareOp compareOp, byte[] value, Delete delete)
             throws IOException {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return false;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public void mutateRow(RowMutations rm) throws IOException {
-        assert(false);
-
-        // TODO Auto-generated method stub
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public Result append(Append append) throws IOException {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return null;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public Result increment(Increment increment) throws IOException {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return null;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public long incrementColumnValue(byte[] row, byte[] family,
             byte[] qualifier, long amount) throws IOException {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return 0;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public long incrementColumnValue(byte[] row, byte[] family,
             byte[] qualifier, long amount, Durability durability)
             throws IOException {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return 0;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public CoprocessorRpcChannel coprocessorService(byte[] row) {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return null;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public <T extends Service, R> Map<byte[], R> coprocessorService(
             Class<T> service, byte[] startKey, byte[] endKey,
             Call<T, R> callable) throws ServiceException, Throwable {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return null;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public <T extends Service, R> void coprocessorService(Class<T> service,
             byte[] startKey, byte[] endKey, Call<T, R> callable,
             Callback<R> callback) throws ServiceException, Throwable {
-        assert(false);
-
-        // TODO Auto-generated method stub
+        throw new UnsupportedOperationException();
     }
 
     @Override
@@ -392,10 +268,7 @@ public class OmidTransactionTable implements HTableInterface {
             MethodDescriptor methodDescriptor, Message request,
             byte[] startKey, byte[] endKey, R responsePrototype)
             throws ServiceException, Throwable {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return null;
+        throw new UnsupportedOperationException();
     }
 
     @Override
@@ -403,81 +276,43 @@ public class OmidTransactionTable implements HTableInterface {
             MethodDescriptor methodDescriptor, Message request,
             byte[] startKey, byte[] endKey, R responsePrototype,
             Callback<R> callback) throws ServiceException, Throwable {
-        assert(false);
-
-        // TODO Auto-generated method stub
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
             CompareOp compareOp, byte[] value, RowMutations mutation)
             throws IOException {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return false;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public int getOperationTimeout() {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return 0;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public int getRpcTimeout() {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        return 0;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public void setOperationTimeout(int arg0) {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public void setRpcTimeout(int arg0) {
-        assert(false);
-
-        // TODO Auto-generated method stub
-        
-    }
-
-//    @Override
-//    public int getReadRpcTimeout() {
-//      assert(false);
-//
-//        // TODO Auto-generated method stub
-//      return 0;
-//    }
-//
-//    @Override
-//    public void setReadRpcTimeout(int readRpcTimeout) {
-//      assert(false);
-//
-//      // TODO Auto-generated method stub
-//    }
-//
-//    @Override
-//    public int getWriteRpcTimeout() {
-//      assert(false);
-//
-//      // TODO Auto-generated method stub
-//      return 0;
-//    }
-//
-//    @Override
-//    public void setWriteRpcTimeout(int writeRpcTimeout) {
-//      assert(false);
-//
-//      // TODO Auto-generated method stub
-//
-//    }
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public long getWriteBufferSize() {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void setWriteBufferSize(long writeBufferSize) throws IOException {
+        throw new UnsupportedOperationException();
+    }
 }

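A minimal usage sketch (not part of the commit) of how the rewritten OmidTransactionTable is meant to be driven, mirroring FlappingTransactionIT above: writes route through TTable with the current transaction, multi-put and batch() are now supported, and the remaining unimplemented Table methods throw UnsupportedOperationException instead of silently returning null or false. The context and underlying Table are assumed to come from Phoenix's connection services; method and variable names here are placeholders.

    import java.util.Collections;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Row;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.phoenix.transaction.PhoenixTransactionContext;

    class OmidTableUsageSketch {
        static void writeThroughTxn(PhoenixTransactionContext txContext, Table htable) throws Exception {
            txContext.begin();
            // Wrap the plain HBase Table; with this patch the wrapper implements Table,
            // not the removed HTableInterface.
            Table txTable = txContext.getTransactionalTable(htable, false);

            Put put = new Put(Bytes.toBytes("z"));
            put.addColumn(Bytes.toBytes("0"), Bytes.toBytes("COL"), Bytes.toBytes("value"));
            txTable.put(put);                             // routed through TTable.put(tx, put)
            txTable.put(Collections.singletonList(put));  // put(List<Put>) no longer throws

            Object[] results = new Object[1];
            // batch() now delegates to TTable.batch(tx, actions) and fills the results array.
            txTable.batch(Collections.<Row>singletonList(put), results);

            txContext.commit();                           // visible only after the Omid commit succeeds
        }
    }
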
http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java b/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java
index 9141f89..1882004 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java
@@ -20,7 +20,7 @@ package org.apache.phoenix.transaction;
 import java.sql.SQLException;
 import java.util.concurrent.TimeoutException;
 
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.transaction.TransactionFactory.Provider;
@@ -106,12 +106,12 @@ public interface PhoenixTransactionContext {
         }
 
         @Override
-        public HTableInterface getTransactionalTable(HTableInterface htable, boolean isConflictFree) {
+        public Table getTransactionalTable(Table htable, boolean isConflictFree) {
             return null;
         }
 
         @Override
-        public HTableInterface getTransactionalTableWriter(PhoenixConnection connection, PTable table, HTableInterface htable, boolean isIndex) {
+        public Table getTransactionalTableWriter(PhoenixConnection connection, PTable table, Table htable, boolean isIndex) {
             return null;
         }
     };
@@ -230,6 +230,6 @@ public interface PhoenixTransactionContext {
     public Provider getProvider();
     public PhoenixTransactionContext newTransactionContext(PhoenixTransactionContext contex, boolean subTask);
 
-    public HTableInterface getTransactionalTable(HTableInterface htable, boolean isConflictFree) throws SQLException;
-    public HTableInterface getTransactionalTableWriter(PhoenixConnection connection, PTable table, HTableInterface htable, boolean isIndex) throws SQLException;
+    public Table getTransactionalTable(Table htable, boolean isConflictFree) throws SQLException;
+    public Table getTransactionalTableWriter(PhoenixConnection connection, PTable table, Table htable, boolean isIndex) throws SQLException;
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java b/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
index bd0e02e..18a05ce 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
@@ -409,14 +410,14 @@ public class TephraTransactionContext implements PhoenixTransactionContext {
     }
     
     @Override
-    public HTableInterface getTransactionalTable(HTableInterface htable, boolean isConflictFree) {
+    public Table getTransactionalTable(Table htable, boolean isConflictFree) {
         TransactionAwareHTable transactionAwareHTable = new TransactionAwareHTable(htable, isConflictFree ? TxConstants.ConflictDetection.NONE : TxConstants.ConflictDetection.ROW);
         this.addTransactionAware(transactionAwareHTable);
         return transactionAwareHTable;
     }
 
     @Override
-    public HTableInterface getTransactionalTableWriter(PhoenixConnection connection, PTable table, HTableInterface htable, boolean isIndex) throws SQLException {
+    public Table getTransactionalTableWriter(PhoenixConnection connection, PTable table, Table htable, boolean isIndex) throws SQLException {
         // If we have indexes, wrap the HTable in a delegate HTable that
         // will attach the necessary index meta data in the event of a
         // rollback
@@ -450,7 +451,7 @@ public class TephraTransactionContext implements PhoenixTransactionContext {
         private final PTable table;
         private final PhoenixConnection connection;
 
-        private RollbackHookHTableWrapper(HTableInterface delegate, PTable table, PhoenixConnection connection) {
+        private RollbackHookHTableWrapper(Table delegate, PTable table, PhoenixConnection connection) {
             super(delegate);
             this.table = table;
             this.connection = connection;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java b/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
index 14c04c2..f0a26b9 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
@@ -63,6 +63,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -795,7 +796,7 @@ public class TestUtil {
         if (table.isTransactional()) {
             mutationState.startTransaction(table.getTransactionProvider());
         }
-        try (HTableInterface htable = mutationState.getHTable(table)) {
+        try (Table htable = mutationState.getHTable(table)) {
             byte[] markerRowKey = Bytes.toBytes("TO_DELETE");
            
             Put put = new Put(markerRowKey);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/86292924/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 8e1f05c..f995134 100644
--- a/pom.xml
+++ b/pom.xml
@@ -100,7 +100,7 @@
     <joni.version>2.1.2</joni.version>
     <avatica.version>1.12.0</avatica.version>
     <jettyVersion>8.1.7.v20120910</jettyVersion>
-    <tephra.version>0.14.0-incubating</tephra.version>
+    <tephra.version>0.15.0-incubating</tephra.version>
     <omid.version>0.8.2.11-SNAPSHOT</omid.version>
     <spark.version>2.0.2</spark.version>
     <scala.version>2.11.8</scala.version>