You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by la...@apache.org on 2013/04/12 19:07:02 UTC

svn commit: r1467357 [3/3] - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ hbase-protoco...

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java Fri Apr 12 17:07:00 2013
@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -69,8 +70,11 @@ public class TestResettingCounters {
     HRegion region = HRegion.createHRegion(hri, path, conf, htd);
     try {
       Increment odd = new Increment(rows[0]);
+      odd.setDurability(Durability.SKIP_WAL);
       Increment even = new Increment(rows[0]);
+      even.setDurability(Durability.SKIP_WAL);
       Increment all = new Increment(rows[0]);
+      all.setDurability(Durability.SKIP_WAL);
       for (int i=0;i<numQualifiers;i++) {
         if (i % 2 == 0) even.addColumn(families[0], qualifiers[i], 1);
         else odd.addColumn(families[0], qualifiers[i], 1);
@@ -78,14 +82,14 @@ public class TestResettingCounters {
       }
 
       // increment odd qualifiers 5 times and flush
-      for (int i=0;i<5;i++) region.increment(odd, false);
+      for (int i=0;i<5;i++) region.increment(odd);
       region.flushcache();
 
       // increment even qualifiers 5 times
-      for (int i=0;i<5;i++) region.increment(even, false);
+      for (int i=0;i<5;i++) region.increment(even);
 
       // increment all qualifiers, should have value=6 for all
-      Result result = region.increment(all, false);
+      Result result = region.increment(all);
       assertEquals(numQualifiers, result.size());
       KeyValue [] kvs = result.raw();
       for (int i=0;i<kvs.length;i++) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java Fri Apr 12 17:07:00 2013
@@ -498,7 +498,7 @@ public class TestScanner extends HBaseTe
       Delete dc = new Delete(firstRowBytes);
       /* delete column1 of firstRow */
       dc.deleteColumns(fam1, col1);
-      r.delete(dc, true);
+      r.delete(dc);
       r.flushcache();
 
       addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java Fri Apr 12 17:07:00 2013
@@ -419,7 +419,7 @@ public class TestSeekOptimizations {
 
         region.put(put);
         if (!del.isEmpty()) {
-          region.delete(del, true);
+          region.delete(del);
         }
 
         // Add remaining timestamps (those we have not deleted) to expected

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java Fri Apr 12 17:07:00 2013
@@ -30,6 +30,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -69,7 +70,7 @@ public class TestWideScanner extends HBa
         byte[] b = Bytes.toBytes(String.format("%10d", i));
         for (j = 0; j < 100; j++) {
           Put put = new Put(row);
-          put.setWriteToWAL(false);
+          put.setDurability(Durability.SKIP_WAL);
           put.add(COLUMNS[rng.nextInt(COLUMNS.length)], b, ++ts, b);
           region.put(put);
           count++;

Added: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java?rev=1467357&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java (added)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java Fri Apr 12 17:07:00 2013
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.wal;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Tests for HLog write durability
+ */
+@Category(MediumTests.class)
+public class TestDurability {
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static FileSystem FS;
+  private static MiniDFSCluster CLUSTER;
+  private static Configuration CONF;
+  private static final Path DIR = TEST_UTIL.getDataTestDir("TestDurability");
+
+  private static byte[] FAMILY = Bytes.toBytes("family");
+  private static byte[] ROW = Bytes.toBytes("row");
+  private static byte[] COL = Bytes.toBytes("col");
+
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    CONF = TEST_UTIL.getConfiguration();
+    CONF.setLong("hbase.regionserver.optionallogflushinterval", 500*1000);
+    TEST_UTIL.startMiniDFSCluster(1);
+
+    CLUSTER = TEST_UTIL.getDFSCluster();
+    FS = CLUSTER.getFileSystem();
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testDurability() throws Exception {
+    HLog wal = HLogFactory.createHLog(FS, DIR, "hlogdir",
+        "hlogdir_archive", CONF);
+    byte[] tableName = Bytes.toBytes("TestDurability");
+    HRegion region = createHRegion(tableName, "region", wal, false);
+    HRegion deferredRegion = createHRegion(tableName, "deferredRegion", wal, true);
+
+    region.put(newPut(null));
+
+    verifyHLogCount(wal, 1);
+
+    // a put through the deferred table does not write to the wal immediately
+    deferredRegion.put(newPut(null));
+    verifyHLogCount(wal, 1);
+    // but will after we sync the wal
+    wal.sync();
+    verifyHLogCount(wal, 2);
+
+    // a put through a deferred table is synced along with the next sync'ed put
+    deferredRegion.put(newPut(null));
+    verifyHLogCount(wal, 2);
+    region.put(newPut(null));
+    verifyHLogCount(wal, 4);
+
+    // USE_DEFAULT durability defers to the table default, same as above
+    deferredRegion.put(newPut(Durability.USE_DEFAULT));
+    verifyHLogCount(wal, 4);
+    region.put(newPut(Durability.USE_DEFAULT));
+    verifyHLogCount(wal, 6);
+
+    // SKIP_WAL never writes to the wal
+    region.put(newPut(Durability.SKIP_WAL));
+    deferredRegion.put(newPut(Durability.SKIP_WAL));
+    verifyHLogCount(wal, 6);
+    wal.sync();
+    verifyHLogCount(wal, 6);
+
+    // async overrides sync table default
+    region.put(newPut(Durability.ASYNC_WAL));
+    deferredRegion.put(newPut(Durability.ASYNC_WAL));
+    verifyHLogCount(wal, 6);
+    wal.sync();
+    verifyHLogCount(wal, 8);
+
+    // sync overrides async table default
+    region.put(newPut(Durability.SYNC_WAL));
+    deferredRegion.put(newPut(Durability.SYNC_WAL));
+    verifyHLogCount(wal, 10);
+
+    // fsync behaves like sync
+    region.put(newPut(Durability.FSYNC_WAL));
+    deferredRegion.put(newPut(Durability.FSYNC_WAL));
+    verifyHLogCount(wal, 12);
+  }
+
+  private Put newPut(Durability durability) {
+    Put p = new Put(ROW);
+    p.add(FAMILY, COL, COL);
+    if (durability != null) {
+      p.setDurability(durability);
+    }
+    return p;
+  }
+
+  private void verifyHLogCount(HLog log, int expected) throws Exception {
+    Path walPath = ((FSHLog) log).computeFilename();
+    HLog.Reader reader = HLogFactory.createReader(FS, walPath, CONF);
+    int count = 0;
+    HLog.Entry entry = new HLog.Entry();
+    while (reader.next(entry) != null) count++;
+    reader.close();
+    assertEquals(expected, count);
+  }
+
+  // lifted from TestAtomicOperation
+  private HRegion createHRegion (byte [] tableName, String callingMethod, HLog log, boolean isDeferredLogFlush)
+    throws IOException {
+      HTableDescriptor htd = new HTableDescriptor(tableName);
+      htd.setDeferredLogFlush(isDeferredLogFlush);
+      HColumnDescriptor hcd = new HColumnDescriptor(FAMILY);
+      htd.addFamily(hcd);
+      HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+      Path path = new Path(DIR + callingMethod);
+      if (FS.exists(path)) {
+        if (!FS.delete(path, true)) {
+          throw new IOException("Failed delete of " + path);
+        }
+      }
+      return HRegion.createHRegion(info, path, HBaseConfiguration.create(), htd, log);
+    }
+
+}

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java Fri Apr 12 17:07:00 2013
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.client.HB
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
 import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -298,14 +299,14 @@ public class TestMasterReplication {
     @Override
     public void prePut(final ObserverContext<RegionCoprocessorEnvironment> e,
         final Put put, final WALEdit edit, 
-        final boolean writeToWAL)
+        final Durability durability)
         throws IOException {
       nCount++;
     }
     @Override
     public void postDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
         final Delete delete, final WALEdit edit, 
-        final boolean writeToWAL)
+        final Durability durability)
         throws IOException {
       nDelete++;
     }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java Fri Apr 12 17:07:00 2013
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
 import org.apache.hadoop.hbase.rest.client.Response;
@@ -87,7 +88,7 @@ public class TestScannerResource {
             k[1] = b2;
             k[2] = b3;
             Put put = new Put(k);
-            put.setWriteToWAL(false);
+            put.setDurability(Durability.SKIP_WAL);
             put.add(famAndQf[0], famAndQf[1], k);
             table.put(put);
             count++;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java Fri Apr 12 17:07:00 2013
@@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.client.HB
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
@@ -137,7 +138,7 @@ public class TestScannersWithFilters {
       // Insert first half
       for(byte [] ROW : ROWS_ONE) {
         Put p = new Put(ROW);
-        p.setWriteToWAL(false);
+        p.setDurability(Durability.SKIP_WAL);
         for(byte [] QUALIFIER : QUALIFIERS_ONE) {
           p.add(FAMILIES[0], QUALIFIER, VALUES[0]);
         }
@@ -145,7 +146,7 @@ public class TestScannersWithFilters {
       }
       for(byte [] ROW : ROWS_TWO) {
         Put p = new Put(ROW);
-        p.setWriteToWAL(false);
+        p.setDurability(Durability.SKIP_WAL);
         for(byte [] QUALIFIER : QUALIFIERS_TWO) {
           p.add(FAMILIES[1], QUALIFIER, VALUES[1]);
         }
@@ -155,7 +156,7 @@ public class TestScannersWithFilters {
       // Insert second half (reverse families)
       for(byte [] ROW : ROWS_ONE) {
         Put p = new Put(ROW);
-        p.setWriteToWAL(false);
+        p.setDurability(Durability.SKIP_WAL);
         for(byte [] QUALIFIER : QUALIFIERS_ONE) {
           p.add(FAMILIES[1], QUALIFIER, VALUES[0]);
         }
@@ -163,7 +164,7 @@ public class TestScannersWithFilters {
       }
       for(byte [] ROW : ROWS_TWO) {
         Put p = new Put(ROW);
-        p.setWriteToWAL(false);
+        p.setDurability(Durability.SKIP_WAL);
         for(byte [] QUALIFIER : QUALIFIERS_TWO) {
           p.add(FAMILIES[0], QUALIFIER, VALUES[1]);
         }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java Fri Apr 12 17:07:00 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
 import org.apache.hadoop.hbase.rest.client.Response;
@@ -94,7 +95,7 @@ public class TestTableResource {
           k[1] = b2;
           k[2] = b3;
           Put put = new Put(k);
-          put.setWriteToWAL(false);
+          put.setDurability(Durability.SKIP_WAL);
           put.add(famAndQf[0], famAndQf[1], k);
           table.put(put);
         }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java Fri Apr 12 17:07:00 2013
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.LargeTest
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.exceptions.SnapshotDoesNotExistException;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
@@ -235,7 +236,7 @@ public class TestRestoreFlushSnapshotFro
       byte[] value = Bytes.add(Bytes.toBytes(System.currentTimeMillis()), Bytes.toBytes(rows));
       byte[] key = Bytes.toBytes(MD5Hash.getMD5AsHex(value));
       Put put = new Put(key);
-      put.setWriteToWAL(false);
+      put.setDurability(Durability.SKIP_WAL);
       for (byte[] family: families) {
         put.add(family, qualifier, value);
       }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java Fri Apr 12 17:07:00 2013
@@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.client.HT
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -215,7 +216,7 @@ public class TestCoprocessorScanPolicy {
     // since it is loaded by a different class loader
     @Override
     public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c, final Put put,
-        final WALEdit edit, final boolean writeToWAL) throws IOException {
+        final WALEdit edit, final Durability durability) throws IOException {
       if (put.getAttribute("ttl") != null) {
         Cell cell = put.getFamilyMap().values().iterator().next().get(0);
         KeyValue kv = KeyValueUtil.ensureKeyValue(cell);

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java Fri Apr 12 17:07:00 2013
@@ -68,6 +68,7 @@ import org.apache.hadoop.hbase.client.Pu
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.io.hfile.TestHFile;
 import org.apache.hadoop.hbase.master.AssignmentManager;
 import org.apache.hadoop.hbase.master.HMaster;
@@ -165,7 +166,7 @@ public class TestHBaseFsck {
         // When we find a diff RS, change the assignment and break
         if (startCode != sn.getStartcode()) {
           Put put = new Put(res.getRow());
-          put.setWriteToWAL(false);
+          put.setDurability(Durability.SKIP_WAL);
           put.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
             Bytes.toBytes(sn.getHostAndPort()));
           put.add(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java?rev=1467357&r1=1467356&r2=1467357&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java Fri Apr 12 17:07:00 2013
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.catalog.C
 import org.apache.hadoop.hbase.catalog.MetaReader;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -141,7 +142,7 @@ public class TestMergeTable {
     LOG.info("Created region " + region.getRegionNameAsString());
     for(int i = firstRow; i < firstRow + nrows; i++) {
       Put put = new Put(Bytes.toBytes("row_" + String.format("%1$05d", i)));
-      put.setWriteToWAL(false);
+      put.setDurability(Durability.SKIP_WAL);
       put.add(COLUMN_NAME, null,  VALUE);
       region.put(put);
       if (i % 10000 == 0) {