Posted to commits@phoenix.apache.org by td...@apache.org on 2019/06/07 18:53:03 UTC

[phoenix] branch 4.14-HBase-1.3 updated: PHOENIX-5318 Slots passed to SkipScan filter is incorrect for desc primary keys that are prefixes of each other

This is an automated email from the ASF dual-hosted git repository.

tdsilva pushed a commit to branch 4.14-HBase-1.3
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/4.14-HBase-1.3 by this push:
     new 3c4f030  PHOENIX-5318 Slots passed to SkipScan filter is incorrect for desc primary keys that are prefixes of each other
3c4f030 is described below

commit 3c4f0301a749b3c1e6ef4d3fa39e3e0358775f95
Author: Thomas D'Silva <td...@apache.org>
AuthorDate: Thu Jun 6 15:19:03 2019 -0700

    PHOENIX-5318 Slots passed to SkipScan filter is incorrect for desc primary keys that are prefixes of each other
---
 .../apache/phoenix/end2end/SkipScanQueryIT.java    | 71 ++++++++++++++++++++++
 .../org/apache/phoenix/compile/ScanRanges.java     |  6 +-
 .../java/org/apache/phoenix/query/KeyRange.java    | 29 +++++++++
 3 files changed, 103 insertions(+), 3 deletions(-)
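
For a DESC VARCHAR key part the stored bytes are the inverted value bytes, and because the descending separator byte is 0xFF, a value that is a byte prefix of another (for example '2' and '23') lands after it on disk. Sorting the skip-scan slot ranges with the plain ascending comparator therefore produces slots in the wrong order, which is what this patch fixes. Below is a minimal sketch of that byte-level difference using the comparison class the patch wires into KeyRange; the invert() helper is only a stand-in for Phoenix's DESC encoding, not a Phoenix API.

    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.phoenix.execute.DescVarLengthFastByteComparisons;

    public class DescPrefixSketch {
        // Stand-in for Phoenix's DESC encoding: each byte of the value is inverted.
        static byte[] invert(String s) {
            byte[] b = Bytes.toBytes(s);
            for (int i = 0; i < b.length; i++) {
                b[i] = (byte) (b[i] ^ 0xFF);
            }
            return b;
        }

        public static void main(String[] args) {
            byte[] two = invert("2");          // 0xCD
            byte[] twentyThree = invert("23"); // 0xCD 0xCC

            // Plain ascending byte order: the prefix '2' sorts first (negative result).
            System.out.println(Bytes.compareTo(two, twentyThree));

            // DESC-aware comparison: '2' sorts after '23' (positive result),
            // which is the order the corresponding rows appear on disk.
            System.out.println(DescVarLengthFastByteComparisons.compareTo(
                    two, 0, two.length, twentyThree, 0, twentyThree.length));
        }
    }
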

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
index fb0b568..f66f196 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
@@ -29,13 +29,20 @@ import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
 
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.filter.SkipScanFilter;
+import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.util.TestUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.PropertiesUtil;
@@ -584,4 +591,68 @@ public class SkipScanQueryIT extends ParallelStatsDisabledIT {
             assertTrue(rs.next());
         }
     }
+
+    @Test
+    public void testOrWithMixedOrderPKs() throws Exception {
+        String tableName = generateUniqueName();
+        try (Connection conn = DriverManager.getConnection(getUrl())) {
+            conn.setAutoCommit(true);
+            Statement stmt = conn.createStatement();
+
+            stmt.execute("CREATE TABLE " + tableName +
+                    " (COL1 VARCHAR, COL2 VARCHAR CONSTRAINT PK PRIMARY KEY (COL1 DESC, COL2)) ");
+
+            // this is the order the rows will be stored on disk
+            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) VALUES ('8', 'a')");
+            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) VALUES ('6', 'a')");
+            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) VALUES ('23', 'b')");
+            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) VALUES ('23', 'bb')");
+            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) VALUES ('2', 'a')");
+            stmt.execute("UPSERT INTO " + tableName + " (COL1, COL2) VALUES ('17', 'a')");
+
+
+            // test values in the skip scan filter which are prefixes of another value, eg 1,12 and 2,23
+            String sql = "select COL1, COL2 from " + tableName + " where COL1='1' OR COL1='2' OR COL1='3' OR COL1='4' " +
+                    "OR COL1='5' OR COL1='6' OR COL1='8' OR COL1='17' OR COL1='12' OR COL1='23'";
+
+            ResultSet rs = stmt.executeQuery(sql);
+            assertTrue(rs.next());
+
+            QueryPlan plan = stmt.unwrap(PhoenixStatement.class).getQueryPlan();
+            assertEquals("Expected a single scan ", 1, plan.getScans().size());
+            assertEquals("Expected a single scan ", 1, plan.getScans().get(0).size());
+            Scan scan = plan.getScans().get(0).get(0);
+            FilterList filterList = (FilterList)scan.getFilter();
+            boolean skipScanFilterFound = false;
+            for (Filter f : filterList.getFilters()) {
+                if (f instanceof SkipScanFilter) {
+                    skipScanFilterFound = true;
+                    SkipScanFilter skipScanFilter = (SkipScanFilter) f;
+                    assertEquals("Expected a single slot ", skipScanFilter.getSlots().size(), 1);
+                    assertEquals("Number of key ranges should match number of or filters ",
+                            skipScanFilter.getSlots().get(0).size(), 10);
+                }
+            }
+            assertTrue("Should use skip scan filter", skipScanFilterFound);
+
+            assertEquals("8", rs.getString(1));
+            assertEquals("a", rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals("6", rs.getString(1));
+            assertEquals("a", rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals("23", rs.getString(1));
+            assertEquals("b", rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals("23", rs.getString(1));
+            assertEquals("bb", rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals("2", rs.getString(1));
+            assertEquals("a", rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals("17", rs.getString(1));
+            assertEquals("a", rs.getString(2));
+            assertFalse(rs.next());
+        }
+    }
 }
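
The result order asserted in the test above follows from the row key layout: COL1 is DESC and is followed by COL2, so each row key starts with the inverted COL1 bytes plus the 0xFF descending separator, and HBase returns rows in ascending byte order of those keys. The sketch below reproduces that ordering; the descKeyPrefix() helper is an assumption that mirrors the layout described here, not a Phoenix API.

    import java.util.Map;
    import java.util.TreeMap;

    import org.apache.hadoop.hbase.util.Bytes;

    public class DescRowKeyOrderSketch {
        // Assumed layout for a non-terminal DESC VARCHAR PK column:
        // inverted value bytes followed by the 0xFF descending separator.
        static byte[] descKeyPrefix(String col1) {
            byte[] b = Bytes.toBytes(col1);
            byte[] key = new byte[b.length + 1];
            for (int i = 0; i < b.length; i++) {
                key[i] = (byte) (b[i] ^ 0xFF);
            }
            key[b.length] = (byte) 0xFF;
            return key;
        }

        public static void main(String[] args) {
            // TreeMap sorted the way HBase sorts row keys (ascending bytes).
            Map<byte[], String> diskOrder = new TreeMap<>(Bytes.BYTES_COMPARATOR);
            for (String col1 : new String[] { "8", "6", "23", "2", "17" }) {
                diskOrder.put(descKeyPrefix(col1), col1);
            }
            // Prints 8, 6, 23, 2, 17 -- the order the test's rows come back in.
            System.out.println(diskOrder.values());
        }
    }
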
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
index 1732fce..74bfd90 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
@@ -110,8 +110,9 @@ public class ScanRanges {
         }
         List<List<KeyRange>> sortedRanges = Lists.newArrayListWithExpectedSize(ranges.size());
         for (int i = 0; i < ranges.size(); i++) {
+            Field f = schema.getField(i);
             List<KeyRange> sorted = Lists.newArrayList(ranges.get(i));
-            Collections.sort(sorted, KeyRange.COMPARATOR);
+            Collections.sort(sorted, f.getSortOrder() == SortOrder.ASC ? KeyRange.COMPARATOR : KeyRange.DESC_COMPARATOR);
             sortedRanges.add(ImmutableList.copyOf(sorted));
         }
         
@@ -608,9 +609,8 @@ public class ScanRanges {
                 if (ranges != null && ranges.size() > rowTimestampColPos) {
                     List<KeyRange> rowTimestampColRange = ranges.get(rowTimestampColPos);
                     List<KeyRange> sortedRange = new ArrayList<>(rowTimestampColRange);
-                    Collections.sort(sortedRange, KeyRange.COMPARATOR);
-                    //ranges.set(rowTimestampColPos, sortedRange); //TODO: do I really need to do this?
                     Field f = schema.getField(rowTimestampColPos);
+                    Collections.sort(sortedRange, f.getSortOrder() == SortOrder.ASC ? KeyRange.COMPARATOR : KeyRange.DESC_COMPARATOR);
                     SortOrder order = f.getSortOrder();
                     KeyRange lowestRange = sortedRange.get(0);
                     KeyRange highestRange = sortedRange.get(rowTimestampColRange.size() - 1);
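
Both call sites patched above choose the comparator from the slot's Field sort order. A minimal sketch of that selection, reusing only the names visible in this patch (the sortSlot() helper itself is hypothetical):

    import java.util.Collections;
    import java.util.List;

    import org.apache.phoenix.query.KeyRange;
    import org.apache.phoenix.schema.SortOrder;

    public class SlotSortSketch {
        // Sort a slot's ranges the way its column is laid out on disk,
        // mirroring the two Collections.sort calls changed above.
        static void sortSlot(List<KeyRange> slot, SortOrder sortOrder) {
            Collections.sort(slot, sortOrder == SortOrder.ASC
                    ? KeyRange.COMPARATOR
                    : KeyRange.DESC_COMPARATOR);
        }
    }
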
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/KeyRange.java b/phoenix-core/src/main/java/org/apache/phoenix/query/KeyRange.java
index 7d09adb..2b66061 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/KeyRange.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/KeyRange.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.phoenix.execute.DescVarLengthFastByteComparisons;
 import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.ScanUtil.BytesComparator;
@@ -108,6 +109,34 @@ public class KeyRange implements Writable {
         }
     };
 
+    public static final Comparator<KeyRange> DESC_COMPARATOR = new Comparator<KeyRange>() {
+        @Override public int compare(KeyRange o1, KeyRange o2) {
+            int result = Boolean.compare(o2.lowerUnbound(), o1.lowerUnbound());
+            if (result != 0) {
+                return result;
+            }
+            result = DescVarLengthFastByteComparisons.compareTo(o1.getLowerRange(), 0, o1.getLowerRange().length,
+                    o2.getLowerRange(), 0, o2.getLowerRange().length);
+            if (result != 0) {
+                return result;
+            }
+            result = Boolean.compare(o2.isLowerInclusive(), o1.isLowerInclusive());
+            if (result != 0) {
+                return result;
+            }
+            result = Boolean.compare(o1.upperUnbound(), o2.upperUnbound());
+            if (result != 0) {
+                return result;
+            }
+            result = DescVarLengthFastByteComparisons.compareTo(o1.getUpperRange(), 0, o1.getUpperRange().length,
+                    o2.getUpperRange(), 0, o2.getUpperRange().length);
+            if (result != 0) {
+                return result;
+            }
+            return Boolean.compare(o2.isUpperInclusive(), o1.isUpperInclusive());
+        }
+    };
+
     private byte[] lowerRange;
     private boolean lowerInclusive;
     private byte[] upperRange;
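
To see what the new comparator changes at the KeyRange level, the sketch below sorts point ranges for the DESC-encoded values '2' and '23' with both comparators. It assumes the four-argument KeyRange.getKeyRange(lower, lowerInclusive, upper, upperInclusive) factory, and the desc() helper again stands in for Phoenix's DESC encoding.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.phoenix.query.KeyRange;

    public class DescComparatorSketch {
        // Stand-in for Phoenix's DESC encoding of VARCHAR: every byte is inverted.
        static byte[] desc(String s) {
            byte[] b = Bytes.toBytes(s);
            for (int i = 0; i < b.length; i++) {
                b[i] = (byte) (b[i] ^ 0xFF);
            }
            return b;
        }

        // Point range over the encoded value (assumes the four-argument
        // KeyRange.getKeyRange factory).
        static KeyRange point(String s) {
            byte[] b = desc(s);
            return KeyRange.getKeyRange(b, true, b, true);
        }

        public static void main(String[] args) {
            List<KeyRange> slot = new ArrayList<>(Arrays.asList(point("23"), point("2")));

            // Plain ascending byte order puts the '2' range first (prefix first).
            Collections.sort(slot, KeyRange.COMPARATOR);
            System.out.println(Bytes.toStringBinary(slot.get(0).getLowerRange()));

            // The DESC comparator puts the '23' range first, matching disk order,
            // so the SkipScanFilter slots line up with how the rows are stored.
            Collections.sort(slot, KeyRange.DESC_COMPARATOR);
            System.out.println(Bytes.toStringBinary(slot.get(0).getLowerRange()));
        }
    }
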