Posted to commits@hbase.apache.org by zh...@apache.org on 2020/03/29 09:47:02 UTC

[hbase] branch master updated: HBASE-23845 Removed deprecated setMaxVersions from Scan (#1208)

This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/master by this push:
     new aaae46c  HBASE-23845 Removed deprecated setMaxVersions from Scan (#1208)
aaae46c is described below

commit aaae46c976dc7d99779f86bf2b7ec73730e20577
Author: Jan Hentschel <ja...@ultratendency.com>
AuthorDate: Sun Mar 29 11:46:49 2020 +0200

    HBASE-23845 Removed deprecated setMaxVersions from Scan (#1208)
    
    Signed-off-by: Duo Zhang <zh...@apache.org>
---
 .../java/org/apache/hadoop/hbase/client/Scan.java  | 22 ++++------------------
 .../hadoop/hbase/mapreduce/TestImportExport.java   |  4 ++--
 .../hbase/mapreduce/TestTimeRangeMapRed.java       |  2 +-
 .../hbase/TestPartialResultsFromClientSide.java    |  2 +-
 .../hadoop/hbase/client/TestFromClientSide.java    |  8 ++++----
 .../hadoop/hbase/client/TestFromClientSide5.java   |  2 +-
 .../hbase/client/TestScannersFromClientSide.java   |  4 ++--
 .../hadoop/hbase/client/TestTimestampsFilter.java  |  2 +-
 .../hbase/filter/TestColumnPrefixFilter.java       |  4 ++--
 .../hadoop/hbase/filter/TestColumnRangeFilter.java |  2 +-
 .../filter/TestFilterListOrOperatorWithBlkCnt.java |  4 ++--
 .../filter/TestMultipleColumnPrefixFilter.java     |  8 ++++----
 .../hbase/regionserver/TestColumnSeeking.java      |  4 ++--
 .../hadoop/hbase/regionserver/TestHRegion.java     |  2 +-
 .../hadoop/hbase/regionserver/TestKeepDeletes.java | 16 ++++++++--------
 .../TestNewVersionBehaviorFromClientSide.java      |  6 +++---
 16 files changed, 39 insertions(+), 53 deletions(-)

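For reference, the migration applied across the files below is mechanical: every call to
the removed no-arg setMaxVersions() becomes readAllVersions(), while the int-argument
setMaxVersions(int) is untouched by this commit. A minimal sketch of the replacement
pattern, not part of the patch itself (the ScanMigrationSketch class, its scanAllVersions
helper, and the table handle are hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;

    // Hypothetical helper; 'table' is assumed to be an open Table handle
    // obtained from an existing Connection.
    public class ScanMigrationSketch {
      static void scanAllVersions(Table table) throws IOException {
        Scan scan = new Scan();
        // Before this commit: scan.setMaxVersions();
        scan.readAllVersions();       // return every stored version of each column
        // To cap the version count instead, the int-argument variant remains:
        // scan.readVersions(3);      // at most 3 versions per column
        try (ResultScanner scanner = table.getScanner(scan)) {
          for (Result r : scanner) {
            System.out.println(r);    // each Result carries all versions for one row
          }
        }
      }
    }

Because readAllVersions() also returns this, existing setter chains such as
new Scan().setRaw(true).setMaxVersions() translate directly to
new Scan().setRaw(true).readAllVersions(), as the call sites in
TestNewVersionBehaviorFromClientSide.java below show.
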
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 2d02e25..6b142c9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -59,8 +59,7 @@ import org.slf4j.LoggerFactory;
  * To only retrieve columns with a specific timestamp, call {@link #setTimestamp(long) setTimestamp}
  * .
  * <p>
- * To limit the number of versions of each column to be returned, call {@link #setMaxVersions(int)
- * setMaxVersions}.
+ * To limit the number of versions of each column to be returned, call {@link #setMaxVersions(int)}.
  * <p>
  * To limit the maximum number of values returned for each call to next(), call
  * {@link #setBatch(int) setBatch}.
@@ -341,7 +340,7 @@ public class Scan extends Query {
    * returned, up the number of versions beyond the default.
    * @param minStamp minimum timestamp value, inclusive
    * @param maxStamp maximum timestamp value, exclusive
-   * @see #setMaxVersions()
+   * @see #readAllVersions()
    * @see #setMaxVersions(int)
    * @return this
    */
@@ -356,7 +355,7 @@ public class Scan extends Query {
    * and you want all versions returned, up the number of versions beyond the
    * defaut.
    * @param timestamp version timestamp
-   * @see #setMaxVersions()
+   * @see #readAllVersions()
    * @see #setMaxVersions(int)
    * @return this
    * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
@@ -374,7 +373,7 @@ public class Scan extends Query {
    * and you want all versions returned, up the number of versions beyond the
    * defaut.
    * @param timestamp version timestamp
-   * @see #setMaxVersions()
+   * @see #readAllVersions()
    * @see #setMaxVersions(int)
    * @return this
    */
@@ -518,19 +517,6 @@ public class Scan extends Query {
   }
 
   /**
-   * Get all available versions.
-   * @return this
-   * @deprecated since 2.0.0 and will be removed in 3.0.0. It is easy to misunderstand with column
-   *   family's max versions, so use {@link #readAllVersions()} instead.
-   * @see #readAllVersions()
-   * @see <a href="https://issues.apache.org/jira/browse/HBASE-17125">HBASE-17125</a>
-   */
-  @Deprecated
-  public Scan setMaxVersions() {
-    return readAllVersions();
-  }
-
-  /**
    * Get up to the specified number of versions of each column.
    * @param maxVersions maximum versions for each column
    * @return this
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 089b8eb..12060a7 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -374,7 +374,7 @@ public class TestImportExport {
       assertTrue(runImport(args));
 
       Scan s = new Scan();
-      s.setMaxVersions();
+      s.readAllVersions();
       s.setRaw(true);
       ResultScanner scanner = t.getScanner(s);
       Result r = scanner.next();
@@ -448,7 +448,7 @@ public class TestImportExport {
     assertTrue(runImport(args));
 
     Scan s = new Scan();
-    s.setMaxVersions();
+    s.readAllVersions();
     s.setRaw(true);
 
     ResultScanner importedTScanner = importT.getScanner(s);
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
index 2a32f0c..ecef1d7 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
@@ -182,7 +182,7 @@ public class TestTimeRangeMapRed {
       Scan scan = new Scan();
       scan.addColumn(FAMILY_NAME, COLUMN_NAME);
       scan.setTimeRange(MINSTAMP, MAXSTAMP);
-      scan.setMaxVersions();
+      scan.readAllVersions();
       TableMapReduceUtil.initTableMapperJob(TABLE_NAME,
         scan, ProcessTimeRangeMapper.class, Text.class, Text.class, job);
       job.waitForCompletion(true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
index 53670c4..cb1a192 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java
@@ -148,7 +148,7 @@ public class TestPartialResultsFromClientSide {
 
   public void testExpectedValuesOfPartialResults(boolean reversed) throws Exception {
     Scan partialScan = new Scan();
-    partialScan.setMaxVersions();
+    partialScan.readAllVersions();
     // Max result size of 1 ensures that each RPC request will return a single cell. The scanner
     // will need to reconstruct the results into a complete result before returning to the caller
     partialScan.setMaxResultSize(1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 0fcfbfb..70e21ae 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -192,7 +192,7 @@ public class TestFromClientSide extends FromClientSideBase {
 
       Scan s = new Scan(T1);
       s.setTimeRange(0, ts + 3);
-      s.setMaxVersions();
+      s.readAllVersions();
       ResultScanner scanner = h.getScanner(s);
       Cell[] kvs = scanner.next().rawCells();
       assertArrayEquals(T2, CellUtil.cloneValue(kvs[0]));
@@ -201,7 +201,7 @@ public class TestFromClientSide extends FromClientSideBase {
 
       s = new Scan(T1);
       s.setRaw(true);
-      s.setMaxVersions();
+      s.readAllVersions();
       scanner = h.getScanner(s);
       kvs = scanner.next().rawCells();
       assertTrue(PrivateCellUtil.isDeleteFamily(kvs[0]));
@@ -1192,7 +1192,7 @@ public class TestFromClientSide extends FromClientSideBase {
 
       scan = new Scan(ROW);
       scan.addColumn(FAMILY, QUALIFIER);
-      scan.setMaxVersions();
+      scan.readAllVersions();
       result = getSingleScanResult(ht, scan);
       assertNResult(result, ROW, FAMILY, QUALIFIER,
         new long[] { STAMPS[1], STAMPS[2], STAMPS[3], STAMPS[4], STAMPS[5], STAMPS[6], STAMPS[7],
@@ -1210,7 +1210,7 @@ public class TestFromClientSide extends FromClientSideBase {
           VALUES[8] }, 0, 7);
 
       scan = new Scan(ROW);
-      scan.setMaxVersions();
+      scan.readAllVersions();
       result = getSingleScanResult(ht, scan);
       assertNResult(result, ROW, FAMILY, QUALIFIER,
         new long[] { STAMPS[1], STAMPS[2], STAMPS[3], STAMPS[4], STAMPS[5], STAMPS[6], STAMPS[7],
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java
index 07c848f..4205a56 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java
@@ -1448,7 +1448,7 @@ public class TestFromClientSide5 extends FromClientSideBase {
       int versions = 4;
       Scan s = new Scan(row);
       // get all the possible versions
-      s.setMaxVersions();
+      s.readAllVersions();
       s.setRaw(true);
 
       try (ResultScanner scanner = table.getScanner(s)) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
index 5f53d7e..0dbf745 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
@@ -199,7 +199,7 @@ public class TestScannersFromClientSide {
 
     // without batch
     scan = new Scan().withStartRow(ROW);
-    scan.setMaxVersions();
+    scan.readAllVersions();
     scanner = ht.getScanner(scan);
 
     // c4:4, c5:5, c6:6, c7:7
@@ -213,7 +213,7 @@ public class TestScannersFromClientSide {
 
     // with batch
     scan =  new Scan().withStartRow(ROW);
-    scan.setMaxVersions();
+    scan.readAllVersions();
     scan.setBatch(2);
     scanner = ht.getScanner(scan);
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
index ff8e8c3..6719337 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
@@ -356,7 +356,7 @@ public class TestTimestampsFilter {
     Filter filter = new TimestampsFilter(versions);
     Scan scan = new Scan(startRow, endRow);
     scan.setFilter(filter);
-    scan.setMaxVersions();
+    scan.readAllVersions();
     ResultScanner scanner = ht.getScanner(scan);
     return scanner.next(endRowIdx - startRowIdx + 1);
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
index 7b47b57..227eaa3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
@@ -114,7 +114,7 @@ public class TestColumnPrefixFilter {
 
       ColumnPrefixFilter filter;
       Scan scan = new Scan();
-      scan.setMaxVersions();
+      scan.readAllVersions();
       for (String s: prefixMap.keySet()) {
         filter = new ColumnPrefixFilter(Bytes.toBytes(s));
 
@@ -183,7 +183,7 @@ public class TestColumnPrefixFilter {
 
       ColumnPrefixFilter filter;
       Scan scan = new Scan();
-      scan.setMaxVersions();
+      scan.readAllVersions();
       for (String s: prefixMap.keySet()) {
         filter = new ColumnPrefixFilter(Bytes.toBytes(s));
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
index c69466e..1041969 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
@@ -226,7 +226,7 @@ public class TestColumnRangeFilter {
 
     ColumnRangeFilter filter;
     Scan scan = new Scan();
-    scan.setMaxVersions();
+    scan.readAllVersions();
     for (StringRange s : rangeMap.keySet()) {
       filter = new ColumnRangeFilter(s.getStart() == null ? null : Bytes.toBytes(s.getStart()),
           s.isStartInclusive(), s.getEnd() == null ? null : Bytes.toBytes(s.getEnd()),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
index f252e77..24ac794 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
@@ -104,7 +104,7 @@ public class TestFilterListOrOperatorWithBlkCnt {
     generateRows(numRows, ht, family, qf, value);
 
     Scan scan = new Scan();
-    scan.setMaxVersions();
+    scan.readAllVersions();
     long blocksStart = getBlkAccessCount();
 
     List<RowRange> ranges1 = new ArrayList<>();
@@ -154,7 +154,7 @@ public class TestFilterListOrOperatorWithBlkCnt {
 
   private List<Cell> getScanResult(byte[] startRow, byte[] stopRow, Table ht) throws IOException {
     Scan scan = new Scan();
-    scan.setMaxVersions();
+    scan.readAllVersions();
     if(!Bytes.toString(startRow).isEmpty()) {
       scan.withStartRow(startRow);
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
index ee45c0a..3f7a2b7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
@@ -116,7 +116,7 @@ public class TestMultipleColumnPrefixFilter {
 
     MultipleColumnPrefixFilter filter;
     Scan scan = new Scan();
-    scan.setMaxVersions();
+    scan.readAllVersions();
     byte [][] filter_prefix = new byte [2][];
     filter_prefix[0] = new byte [] {'p'};
     filter_prefix[1] = new byte [] {'q'};
@@ -194,7 +194,7 @@ public class TestMultipleColumnPrefixFilter {
 
     MultipleColumnPrefixFilter filter;
     Scan scan = new Scan();
-    scan.setMaxVersions();
+    scan.readAllVersions();
     byte [][] filter_prefix = new byte [2][];
     filter_prefix[0] = new byte [] {'p'};
     filter_prefix[1] = new byte [] {'q'};
@@ -244,7 +244,7 @@ public class TestMultipleColumnPrefixFilter {
 
     MultipleColumnPrefixFilter multiplePrefixFilter;
     Scan scan1 = new Scan();
-    scan1.setMaxVersions();
+    scan1.readAllVersions();
     byte [][] filter_prefix = new byte [1][];
     filter_prefix[0] = new byte [] {'p'};
 
@@ -257,7 +257,7 @@ public class TestMultipleColumnPrefixFilter {
 
     ColumnPrefixFilter singlePrefixFilter;
     Scan scan2 = new Scan();
-    scan2.setMaxVersions();
+    scan2.readAllVersions();
     singlePrefixFilter = new ColumnPrefixFilter(Bytes.toBytes("p"));
 
     scan2.setFilter(singlePrefixFilter);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
index 1a4f5a0..a4753c8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
@@ -149,7 +149,7 @@ public class TestColumnSeeking {
       for (int i = 0; i < numberOfTests + 1; i++) {
         Collection<KeyValue> kvSet;
         Scan scan = new Scan();
-        scan.setMaxVersions();
+        scan.readAllVersions();
         if (i < numberOfTests) {
           if (columnLists[i].isEmpty()) continue; // HBASE-7700
           kvSet = kvMaps[i].values();
@@ -264,7 +264,7 @@ public class TestColumnSeeking {
     for (int i = 0; i < numberOfTests + 1; i++) {
       Collection<KeyValue> kvSet;
       Scan scan = new Scan();
-      scan.setMaxVersions();
+      scan.readAllVersions();
       if (i < numberOfTests) {
         if (columnLists[i].isEmpty()) continue; // HBASE-7700
         kvSet = kvMaps[i].values();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 188105a..499d9c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -3432,7 +3432,7 @@ public class TestHRegion {
     region.put(put);
 
     Scan scan = new Scan(row3, row4);
-    scan.setMaxVersions();
+    scan.readAllVersions();
     scan.addColumn(family, col1);
     InternalScanner s = region.getScanner(scan);
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index 6489b12..dcad173 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -217,7 +217,7 @@ public class TestKeepDeletes {
     // scan still returns delete markers and deletes rows
     Scan s = new Scan();
     s.setRaw(true);
-    s.setMaxVersions();
+    s.readAllVersions();
     InternalScanner scan = region.getScanner(s);
     List<Cell> kvs = new ArrayList<>();
     scan.next(kvs);
@@ -231,7 +231,7 @@ public class TestKeepDeletes {
     //  KEEP_DELETED_CELLS)
     s = new Scan();
     s.setRaw(true);
-    s.setMaxVersions();
+    s.readAllVersions();
     scan = region.getScanner(s);
     kvs = new ArrayList<>();
     scan.next(kvs);
@@ -275,7 +275,7 @@ public class TestKeepDeletes {
 
     // "past" scan does not see rows behind delete marker
     Scan s = new Scan();
-    s.setMaxVersions();
+    s.readAllVersions();
     s.setTimeRange(0L, ts+1);
     InternalScanner scanner = region.getScanner(s);
     List<Cell> kvs = new ArrayList<>();
@@ -306,7 +306,7 @@ public class TestKeepDeletes {
 
     Scan s = new Scan();
     s.setRaw(true);
-    s.setMaxVersions();
+    s.readAllVersions();
     s.addColumn(c0, c0);
 
     try {
@@ -352,7 +352,7 @@ public class TestKeepDeletes {
 
     Scan s = new Scan();
     s.setRaw(true);
-    s.setMaxVersions();
+    s.readAllVersions();
     InternalScanner scan = region.getScanner(s);
     List<Cell> kvs = new ArrayList<>();
     scan.next(kvs);
@@ -370,7 +370,7 @@ public class TestKeepDeletes {
     // verify that raw scans honor the passed timerange
     s = new Scan();
     s.setRaw(true);
-    s.setMaxVersions();
+    s.readAllVersions();
     s.setTimeRange(0, 1);
     scan = region.getScanner(s);
     kvs = new ArrayList<>();
@@ -381,7 +381,7 @@ public class TestKeepDeletes {
     // filter new delete markers
     s = new Scan();
     s.setRaw(true);
-    s.setMaxVersions();
+    s.readAllVersions();
     s.setTimeRange(0, ts+2);
     scan = region.getScanner(s);
     kvs = new ArrayList<>();
@@ -396,7 +396,7 @@ public class TestKeepDeletes {
     // filter old delete markers
     s = new Scan();
     s.setRaw(true);
-    s.setMaxVersions();
+    s.readAllVersions();
     s.setTimeRange(ts+3, ts+5);
     scan = region.getScanner(s);
     kvs = new ArrayList<>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNewVersionBehaviorFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNewVersionBehaviorFromClientSide.java
index 11b29ac..ca4e7c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNewVersionBehaviorFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNewVersionBehaviorFromClientSide.java
@@ -339,20 +339,20 @@ public class TestNewVersionBehaviorFromClientSide {
       t.delete(new Delete(ROW).addColumn(FAMILY, col1, 1000004));
       t.delete(new Delete(ROW).addColumn(FAMILY, col1, 1000003));
 
-      try (ResultScanner scannner = t.getScanner(new Scan().setRaw(true).setMaxVersions())) {
+      try (ResultScanner scannner = t.getScanner(new Scan().setRaw(true).readAllVersions())) {
         Result r = scannner.next();
         assertNull(scannner.next());
         assertEquals(6, r.size());
       }
       TEST_UTIL.getAdmin().flush(t.getName());
-      try (ResultScanner scannner = t.getScanner(new Scan().setRaw(true).setMaxVersions())) {
+      try (ResultScanner scannner = t.getScanner(new Scan().setRaw(true).readAllVersions())) {
         Result r = scannner.next();
         assertNull(scannner.next());
         assertEquals(6, r.size());
       }
       TEST_UTIL.getAdmin().majorCompact(t.getName());
       Threads.sleep(5000);
-      try (ResultScanner scannner = t.getScanner(new Scan().setRaw(true).setMaxVersions())) {
+      try (ResultScanner scannner = t.getScanner(new Scan().setRaw(true).readAllVersions())) {
         Result r = scannner.next();
         assertNull(scannner.next());
         assertEquals(1, r.size());