Posted to commits@phoenix.apache.org by an...@apache.org on 2014/02/24 06:42:51 UTC

[1/8] git commit: PHOENIX-29

Repository: incubator-phoenix
Updated Branches:
  refs/heads/master d3ed79541 -> e781ebaf2


PHOENIX-29


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/59618250
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/59618250
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/59618250

Branch: refs/heads/master
Commit: 59618250ba0515a547a52d6279f62e1897a4f563
Parents: 77cf266
Author: anoopsjohn <an...@gmail.com>
Authored: Mon Feb 24 01:11:25 2014 +0530
Committer: anoopsjohn <an...@gmail.com>
Committed: Mon Feb 24 01:11:25 2014 +0530

----------------------------------------------------------------------
 .../phoenix/compile/ProjectionCompiler.java     | 79 +++-----------------
 1 file changed, 11 insertions(+), 68 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/59618250/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java
index 3d5f8fe..cb1edab 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java
@@ -17,17 +17,9 @@
  */
 package org.apache.phoenix.compile;
 
-import java.io.ByteArrayOutputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
+import java.io.*;
 import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.Set;
+import java.util.*;
 
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -37,52 +29,22 @@ import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
 import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
-import org.apache.phoenix.expression.BaseTerminalExpression;
-import org.apache.phoenix.expression.CoerceExpression;
-import org.apache.phoenix.expression.Expression;
-import org.apache.phoenix.expression.KeyValueColumnExpression;
+import org.apache.phoenix.expression.*;
 import org.apache.phoenix.expression.aggregator.ClientAggregators;
 import org.apache.phoenix.expression.aggregator.ServerAggregators;
 import org.apache.phoenix.expression.function.ArrayIndexFunction;
 import org.apache.phoenix.expression.function.SingleAggregateFunction;
 import org.apache.phoenix.expression.visitor.KeyValueExpressionVisitor;
 import org.apache.phoenix.expression.visitor.SingleAggregateFunctionVisitor;
-import org.apache.phoenix.parse.AliasedNode;
-import org.apache.phoenix.parse.BindParseNode;
-import org.apache.phoenix.parse.ColumnParseNode;
-import org.apache.phoenix.parse.FamilyWildcardParseNode;
-import org.apache.phoenix.parse.FunctionParseNode;
-import org.apache.phoenix.parse.ParseNode;
-import org.apache.phoenix.parse.SelectStatement;
-import org.apache.phoenix.parse.SequenceValueParseNode;
-import org.apache.phoenix.parse.TableName;
-import org.apache.phoenix.parse.TableWildcardParseNode;
-import org.apache.phoenix.parse.WildcardParseNode;
+import org.apache.phoenix.parse.*;
 import org.apache.phoenix.query.QueryConstants;
-import org.apache.phoenix.schema.ArgumentTypeMismatchException;
-import org.apache.phoenix.schema.ColumnNotFoundException;
-import org.apache.phoenix.schema.ColumnRef;
-import org.apache.phoenix.schema.KeyValueSchema;
+import org.apache.phoenix.schema.*;
 import org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder;
-import org.apache.phoenix.schema.PColumn;
-import org.apache.phoenix.schema.PColumnFamily;
-import org.apache.phoenix.schema.PDataType;
-import org.apache.phoenix.schema.PDatum;
-import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.ViewType;
-import org.apache.phoenix.schema.PTableType;
-import org.apache.phoenix.schema.RowKeySchema;
-import org.apache.phoenix.schema.TableRef;
-import org.apache.phoenix.schema.ValueBitSet;
 import org.apache.phoenix.schema.tuple.Tuple;
-import org.apache.phoenix.util.ByteUtil;
-import org.apache.phoenix.util.IndexUtil;
-import org.apache.phoenix.util.SchemaUtil;
-import org.apache.phoenix.util.SizedUtil;
+import org.apache.phoenix.util.*;
 
-import com.google.common.collect.Iterators;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
+import com.google.common.collect.*;
 
 
 /**
@@ -99,14 +61,6 @@ public class ProjectionCompiler {
     private ProjectionCompiler() {
     }
     
-    private static void projectAllColumnFamilies(PTable table, Scan scan) {
-        // Will project all known/declared column families
-        scan.getFamilyMap().clear();
-        for (PColumnFamily family : table.getColumnFamilies()) {
-            scan.addFamily(family.getName().getBytes());
-        }
-    }
-
     private static void projectColumnFamily(PTable table, Scan scan, byte[] family) {
         // Will project all columns for the given CF
         scan.addFamily(family);
@@ -342,21 +296,10 @@ public class ProjectionCompiler {
         }
         
         selectVisitor.compile();
-        // Since we don't have the empty key value in read-only tables,
-        // we must project everything.
-        boolean isProjectEmptyKeyValue = table.getType() != PTableType.VIEW && table.getViewType() != ViewType.MAPPED && !isWildcard;
-        if (isProjectEmptyKeyValue) {
-            for (byte[] family : projectedFamilies) {
-                projectColumnFamily(table, scan, family);       
-            }
-        } else {
-            /* 
-             * TODO: this could be optimized by detecting:
-             * - if a column is projected that's not in the where clause
-             * - if a column is grouped by that's not in the where clause
-             * - if we're not using IS NULL or CASE WHEN expressions
-             */
-             projectAllColumnFamilies(table,scan);
+        boolean isProjectEmptyKeyValue = table.getType() != PTableType.VIEW && table.getViewType() != ViewType.MAPPED
+                && !isWildcard;
+        for (byte[] family : projectedFamilies) {
+            projectColumnFamily(table, scan, family);
         }
         return new RowProjector(projectedColumns, estimatedByteSize, isProjectEmptyKeyValue);
     }
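
The net effect of this change: the compiler no longer falls back to projecting every declared column family for views and mapped views; it always adds just the projected families to the Scan, and the RowProjector records whether the empty key value must be projected. Below is a minimal sketch of the Scan-level behavior, assuming the HBase 0.94-era client API Phoenix used at the time; the class and method names are illustrative, not Phoenix code.

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ProjectionSketch {
        // Add only the families the query actually projects, instead of clearing
        // the family map and re-adding every declared family of the table.
        static Scan projectFamilies(List<byte[]> projectedFamilies) {
            Scan scan = new Scan();
            for (byte[] family : projectedFamilies) {
                scan.addFamily(family); // project every column of this CF only
            }
            return scan;
        }

        public static void main(String[] args) {
            Scan scan = projectFamilies(Arrays.asList(Bytes.toBytes("b"), Bytes.toBytes("c")));
            System.out.println(scan.getFamilyMap().size()); // 2 families, nothing else scanned
        }
    }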


[6/8] git commit: PHOENIX-29

Posted by an...@apache.org.
PHOENIX-29


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/b75206db
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/b75206db
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/b75206db

Branch: refs/heads/master
Commit: b75206db6eac2cc9809feefcc6b4709569101b3b
Parents: 6ba276b
Author: anoopsjohn <an...@gmail.com>
Authored: Mon Feb 24 01:15:43 2014 +0530
Committer: anoopsjohn <an...@gmail.com>
Committed: Mon Feb 24 01:15:43 2014 +0530

----------------------------------------------------------------------
 .../phoenix/iterate/ParallelIterators.java      | 84 ++++++++++++++------
 1 file changed, 58 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/b75206db/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
index 9f19ade..563d97f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
@@ -18,16 +18,9 @@
 package org.apache.phoenix.iterate;
 
 import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.UUID;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
+import java.util.*;
+import java.util.Map.Entry;
+import java.util.concurrent.*;
 
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.client.Scan;
@@ -36,23 +29,16 @@ import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
-import org.apache.phoenix.compile.RowProjector;
-import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.compile.*;
+import org.apache.phoenix.filter.ColumnProjectionFilter;
+import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.job.JobManager.JobCallable;
 import org.apache.phoenix.parse.FilterableStatement;
 import org.apache.phoenix.parse.HintNode;
-import org.apache.phoenix.query.ConnectionQueryServices;
-import org.apache.phoenix.query.KeyRange;
-import org.apache.phoenix.query.QueryConstants;
-import org.apache.phoenix.query.QueryServices;
-import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.SaltingUtil;
-import org.apache.phoenix.schema.TableRef;
-import org.apache.phoenix.util.ReadOnlyProps;
-import org.apache.phoenix.util.SQLCloseables;
-import org.apache.phoenix.util.ScanUtil;
-import org.apache.phoenix.util.SchemaUtil;
-import org.apache.phoenix.util.ServerUtil;
+import org.apache.phoenix.query.*;
+import org.apache.phoenix.schema.*;
+import org.apache.phoenix.schema.PTable.ViewType;
+import org.apache.phoenix.util.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -85,7 +71,9 @@ public class ParallelIterators extends ExplainTable implements ResultIterators {
         }
     };
 
-    public ParallelIterators(StatementContext context, TableRef tableRef, FilterableStatement statement, RowProjector projector, GroupBy groupBy, Integer limit, ParallelIteratorFactory iteratorFactory) throws SQLException {
+    public ParallelIterators(StatementContext context, TableRef tableRef, FilterableStatement statement,
+            RowProjector projector, GroupBy groupBy, Integer limit, ParallelIteratorFactory iteratorFactory)
+            throws SQLException {
         super(context, tableRef, groupBy);
         this.splits = getSplits(context, tableRef, statement.getHint());
         this.iteratorFactory = iteratorFactory;
@@ -96,7 +84,10 @@ public class ParallelIterators extends ExplainTable implements ResultIterators {
             // If nothing projected into scan and we only have one column family, just allow everything
             // to be projected and use a FirstKeyOnlyFilter to skip from row to row. This turns out to
             // be quite a bit faster.
-            if (familyMap.isEmpty() && table.getColumnFamilies().size() == 1) {
+            // Columns referenced in the WHERE clause also get added into familyMap, so when
+            // WHERE conditions are present we cannot add a FirstKeyOnlyFilter up front.
+            if (familyMap.isEmpty() && context.getWhereCoditionColumns().isEmpty()
+                    && table.getColumnFamilies().size() == 1) {
                 // Project the one column family. We must project a column family since it's possible
                 // that there are other non declared column families that we need to ignore.
                 scan.addFamily(table.getColumnFamilies().get(0).getName().getBytes());
@@ -113,6 +104,47 @@ public class ParallelIterators extends ExplainTable implements ResultIterators {
         if (limit != null) {
             ScanUtil.andFilterAtEnd(scan, new PageFilter(limit));
         }
+
+        Map<byte[], NavigableSet<byte[]>> familyMap = scan.getFamilyMap();
+        if (familyMap != null && !familyMap.isEmpty()) {
+            // columnsTracker contains cf -> qualifiers that should be returned.
+            Map<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>> columnsTracker = 
+                    new TreeMap<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>>();
+            for (Entry<byte[], NavigableSet<byte[]>> entry : familyMap.entrySet()) {
+                ImmutableBytesPtr cf = new ImmutableBytesPtr(entry.getKey());
+                NavigableSet<byte[]> qs = entry.getValue();
+                NavigableSet<ImmutableBytesPtr> cols = null;
+                if (qs != null) {
+                    cols = new TreeSet<ImmutableBytesPtr>();
+                    for (byte[] q : qs) {
+                        cols.add(new ImmutableBytesPtr(q));
+                    }
+                }
+                columnsTracker.put(cf, cols);
+            }
+            // Make sure the CFs referenced in the WHERE clause get scanned on the region server (HRS).
+            for (Pair<byte[], byte[]> whereCol : context.getWhereCoditionColumns()) {
+                if (!(familyMap.containsKey(whereCol.getFirst()))) {
+                    scan.addFamily(whereCol.getFirst());
+                }
+            }
+            if (!columnsTracker.isEmpty()) {
+                for (ImmutableBytesPtr f : columnsTracker.keySet()) {
+                    // This addFamily removes the explicit columns from the scan's familyMap so the entire CF is scanned.
+                    // We don't want the ExplicitColumnTracker to be used; the ColumnProjectionFilter handles projection instead.
+                    scan.addFamily(f.get());
+                }
+                ScanUtil.andFilterAtEnd(scan, new ColumnProjectionFilter(SchemaUtil.getEmptyColumnFamily(table),
+                        columnsTracker));
+            }
+            if (table.getViewType() == ViewType.MAPPED) {
+                // Since MAPPED tables have no empty key value, we must select all CFs on the region server, but only
+                // the selected column values are returned to the client.
+                for (PColumnFamily family : table.getColumnFamilies()) {
+                    scan.addFamily(family.getName().getBytes());
+                }
+            }
+        }
     }
 
     /**
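
To summarize the constructor logic added above: the scan's family map is copied into a cf -> qualifier tracker (a null qualifier set means the whole family), WHERE-clause families are forced into the scan, and a ColumnProjectionFilter takes over projection from the ExplicitColumnTracker. A condensed sketch of the tracker-building step, assuming the same 0.94-era APIs; the class name is illustrative:

    import java.util.Map;
    import java.util.NavigableSet;
    import java.util.TreeMap;
    import java.util.TreeSet;

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;

    public class ColumnTrackerSketch {
        // Copy the scan's family map into cf -> qualifier sets keyed by
        // ImmutableBytesPtr. A null qualifier set records "return the whole family".
        static Map<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>> buildTracker(Scan scan) {
            Map<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>> tracker =
                    new TreeMap<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>>();
            for (Map.Entry<byte[], NavigableSet<byte[]>> entry : scan.getFamilyMap().entrySet()) {
                NavigableSet<ImmutableBytesPtr> cols = null;
                if (entry.getValue() != null) {
                    cols = new TreeSet<ImmutableBytesPtr>();
                    for (byte[] q : entry.getValue()) {
                        cols.add(new ImmutableBytesPtr(q));
                    }
                }
                tracker.put(new ImmutableBytesPtr(entry.getKey()), cols);
            }
            return tracker;
        }
    }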


[2/8] git commit: PHOENIX-29

Posted by an...@apache.org.
PHOENIX-29


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/e6de665d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/e6de665d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/e6de665d

Branch: refs/heads/master
Commit: e6de665dcd725ed425656ec7d07027dff9df3e6b
Parents: 5961825
Author: anoopsjohn <an...@gmail.com>
Authored: Mon Feb 24 01:11:50 2014 +0530
Committer: anoopsjohn <an...@gmail.com>
Committed: Mon Feb 24 01:11:50 2014 +0530

----------------------------------------------------------------------
 .../phoenix/compile/StatementContext.java       | 25 +++++++++++++-------
 1 file changed, 16 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/e6de665d/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java
index a7edfdf..168f392 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java
@@ -19,20 +19,17 @@ package org.apache.phoenix.compile;
 
 import java.sql.SQLException;
 import java.text.Format;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixStatement;
-import org.apache.phoenix.query.KeyRange;
-import org.apache.phoenix.query.QueryConstants;
-import org.apache.phoenix.query.QueryServices;
-import org.apache.phoenix.schema.MetaDataClient;
-import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.TableRef;
-import org.apache.phoenix.util.DateUtil;
-import org.apache.phoenix.util.NumberUtil;
-import org.apache.phoenix.util.ScanUtil;
+import org.apache.phoenix.query.*;
+import org.apache.phoenix.schema.*;
+import org.apache.phoenix.util.*;
 
 
 /**
@@ -62,6 +59,7 @@ public class StatementContext {
     private final SequenceManager sequences; 
 
     private TableRef currentTable;
+    private List<Pair<byte[], byte[]>> whereConditionColumns;
     
     public StatementContext(PhoenixStatement statement) {
         this(statement, FromCompiler.EMPTY_TABLE_RESOLVER, new Scan());
@@ -82,6 +80,7 @@ public class StatementContext {
         this.tempPtr = new ImmutableBytesWritable();
         this.currentTable = resolver != null && !resolver.getTables().isEmpty() ? resolver.getTables().get(0) : null;
         this.sequences = new SequenceManager(statement);
+        this.whereConditionColumns = new ArrayList<Pair<byte[],byte[]>>();
     }
 
     public String getDateFormat() {
@@ -219,4 +218,12 @@ public class StatementContext {
     public SequenceManager getSequenceManager(){
         return sequences;
     }
+
+    public void addWhereCoditionColumn(byte[] cf, byte[] q) {
+        whereConditionColumns.add(new Pair<byte[], byte[]>(cf, q));
+    }
+
+    public List<Pair<byte[], byte[]>> getWhereCoditionColumns() {
+        return whereConditionColumns;
+    }
 }
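
This gives the compiler a shared place to record which non-PK columns the WHERE clause touches (the "Codition" spelling follows the method names as committed). The WhereCompiler change later in this digest populates the list; the ParallelIterators change above consumes it. A minimal sketch of that flow, with illustrative family and qualifier values:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.Pair;

    public class WhereColumnTrackingSketch {
        private final List<Pair<byte[], byte[]>> whereConditionColumns =
                new ArrayList<Pair<byte[], byte[]>>();

        // Called while compiling the WHERE clause, once per non-PK column reference.
        void addWhereConditionColumn(byte[] cf, byte[] q) {
            whereConditionColumns.add(new Pair<byte[], byte[]>(cf, q));
        }

        public static void main(String[] args) {
            WhereColumnTrackingSketch ctx = new WhereColumnTrackingSketch();
            // e.g. compiling "WHERE c.col2 = ?" records family c, qualifier COL2
            ctx.addWhereConditionColumn(Bytes.toBytes("c"), Bytes.toBytes("COL2"));
            // Later, scan assembly forces family "c" into the scan even though it
            // is not in the SELECT list, so the filter can evaluate the condition.
            for (Pair<byte[], byte[]> col : ctx.whereConditionColumns) {
                System.out.println(Bytes.toString(col.getFirst()));
            }
        }
    }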


[8/8] git commit: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/incubator-phoenix

Posted by an...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/incubator-phoenix


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/e781ebaf
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/e781ebaf
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/e781ebaf

Branch: refs/heads/master
Commit: e781ebaf2caa0ddc580554ac2c01572d62560ac7
Parents: 43381fc d3ed795
Author: anoopsjohn <an...@gmail.com>
Authored: Mon Feb 24 11:10:15 2014 +0530
Committer: anoopsjohn <an...@gmail.com>
Committed: Mon Feb 24 11:10:15 2014 +0530

----------------------------------------------------------------------
 .../java/org/apache/phoenix/compile/WhereCompiler.java    | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/e781ebaf/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
----------------------------------------------------------------------


[7/8] git commit: PHOENIX-29

Posted by an...@apache.org.
PHOENIX-29


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/43381fcd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/43381fcd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/43381fcd

Branch: refs/heads/master
Commit: 43381fcdd44eb111b6b2feb36dc2d73efe6ead2c
Parents: b75206d
Author: anoopsjohn <an...@gmail.com>
Authored: Mon Feb 24 01:16:56 2014 +0530
Committer: anoopsjohn <an...@gmail.com>
Committed: Mon Feb 24 01:16:56 2014 +0530

----------------------------------------------------------------------
 .../ColumnProjectionOptimizationTest.java       | 253 +++++++++++++++++++
 1 file changed, 253 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/43381fcd/phoenix-core/src/test/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationTest.java b/phoenix-core/src/test/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationTest.java
new file mode 100644
index 0000000..e6aa02f
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/end2end/ColumnProjectionOptimizationTest.java
@@ -0,0 +1,253 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.util.TestUtil.*;
+import static org.junit.Assert.*;
+
+import java.sql.*;
+import java.util.Properties;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.schema.PDataType;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.SchemaUtil;
+import org.junit.Test;
+
+public class ColumnProjectionOptimizationTest extends BaseClientManagedTimeTest {
+
+    @Test
+    public void testSelect() throws Exception {
+        long ts = nextTimestamp();
+        String tenantId = getOrganizationId();
+        initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
+
+        Properties props = new Properties(TEST_PROPERTIES);
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 2));
+        Connection conn = DriverManager.getConnection(PHOENIX_JDBC_URL, props);
+
+        // Table wildcard query
+        String query = "SELECT * FROM aTable";
+        try {
+            PreparedStatement statement = conn.prepareStatement(query);
+            ResultSet rs = statement.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(ROW1, rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals(ROW2, rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals(ROW3, rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals(ROW4, rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals(ROW5, rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals(ROW6, rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals(ROW7, rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals(ROW8, rs.getString(2));
+            assertTrue(rs.next());
+            assertEquals(ROW9, rs.getString(2));
+            assertFalse(rs.next());
+
+            // Select only specific columns
+            query = "SELECT A_STRING, A_INTEGER FROM aTable";
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(A_VALUE, rs.getString(1));
+            assertEquals(1, rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals(A_VALUE, rs.getString(1));
+            assertEquals(2, rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals(A_VALUE, rs.getString(1));
+            assertEquals(3, rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals(A_VALUE, rs.getString(1));
+            assertEquals(4, rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals(B_VALUE, rs.getString(1));
+            assertEquals(5, rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals(B_VALUE, rs.getString(1));
+            assertEquals(6, rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals(B_VALUE, rs.getString(1));
+            assertEquals(7, rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals(B_VALUE, rs.getString(1));
+            assertEquals(8, rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals(C_VALUE, rs.getString(1));
+            assertEquals(9, rs.getInt(2));
+            assertFalse(rs.next());
+
+            // Select only specific columns, with a condition on another column (not in the select list)
+            query = "SELECT B_STRING, A_SHORT FROM aTable WHERE X_INTEGER = ?";
+            statement = conn.prepareStatement(query);
+            statement.setInt(1, 4);
+            rs = statement.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(C_VALUE, rs.getString(1));
+            assertEquals(135, rs.getShort(2));
+            assertFalse(rs.next());
+
+            // Select only specific columns, with a condition on another column (not in the select list),
+            // where one row's selected values are all null
+            query = "SELECT X_LONG, X_INTEGER, Y_INTEGER FROM aTable WHERE B_STRING = ?";
+            statement = conn.prepareStatement(query);
+            statement.setString(1, E_VALUE);
+            rs = statement.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(0, rs.getLong(1));
+            assertTrue(rs.wasNull());
+            assertEquals(0, rs.getInt(2));
+            assertTrue(rs.wasNull());
+            assertEquals(0, rs.getInt(3));
+            assertTrue(rs.wasNull());
+            assertTrue(rs.next());
+            assertEquals(0, rs.getLong(1));
+            assertTrue(rs.wasNull());
+            assertEquals(0, rs.getInt(2));
+            assertTrue(rs.wasNull());
+            assertEquals(0, rs.getInt(3));
+            assertTrue(rs.wasNull());
+            assertTrue(rs.next());
+            assertEquals(Integer.MAX_VALUE + 1L, rs.getLong(1));
+            assertEquals(3, rs.getInt(2));
+            assertEquals(300, rs.getInt(3));
+            assertFalse(rs.next());
+
+            // Select only specific columns, with a condition on one of the selected columns
+            query = "SELECT A_STRING, A_INTEGER FROM aTable WHERE A_INTEGER = ?";
+            statement = conn.prepareStatement(query);
+            statement.setInt(1, 9);
+            rs = statement.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(C_VALUE, rs.getString(1));
+            assertEquals(9, rs.getInt(2));
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testSelectFromViewOnExistingTable() throws Exception {
+        PhoenixConnection pconn = DriverManager.getConnection(PHOENIX_JDBC_URL, TEST_PROPERTIES).unwrap(
+                PhoenixConnection.class);
+        byte[] cfB = Bytes.toBytes(SchemaUtil.normalizeIdentifier("b"));
+        byte[] cfC = Bytes.toBytes(SchemaUtil.normalizeIdentifier("c"));
+        byte[][] familyNames = new byte[][] { cfB, cfC };
+        byte[] htableName = SchemaUtil.getTableNameAsBytes(MDTEST_SCHEMA_NAME, MDTEST_NAME);
+        HBaseAdmin admin = pconn.getQueryServices().getAdmin();
+
+        HTableDescriptor descriptor = new HTableDescriptor(htableName);
+        for (byte[] familyName : familyNames) {
+            HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
+            descriptor.addFamily(columnDescriptor);
+        }
+        admin.createTable(descriptor);
+
+        long ts = nextTimestamp();
+        Properties props = new Properties();
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
+        Connection conn1 = DriverManager.getConnection(PHOENIX_JDBC_URL, props);
+
+        String createStmt = "create view " + MDTEST_NAME + " (id integer not null primary key,"
+                + " b.col1 integer, c.col2 bigint, c.col3 varchar(20))";
+        conn1.createStatement().execute(createStmt);
+        conn1.close();
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
+        PhoenixConnection conn2 = DriverManager.getConnection(PHOENIX_JDBC_URL, props).unwrap(PhoenixConnection.class);
+        byte[] c1 = Bytes.toBytes("COL1");
+        byte[] c2 = Bytes.toBytes("COL2");
+        byte[] c3 = Bytes.toBytes("COL3");
+        HTableInterface htable = null;
+        try {
+            htable = conn2.getQueryServices().getTable(htableName);
+            Put put = new Put(PDataType.INTEGER.toBytes(1));
+            put.add(cfB, c1, ts + 6, PDataType.INTEGER.toBytes(1));
+            put.add(cfC, c2, ts + 6, PDataType.LONG.toBytes(2));
+            htable.put(put);
+
+            put = new Put(PDataType.INTEGER.toBytes(2));
+            put.add(cfC, c2, ts + 6, PDataType.LONG.toBytes(10));
+            put.add(cfC, c3, ts + 6, PDataType.VARCHAR.toBytes("abcd"));
+            htable.put(put);
+
+            put = new Put(PDataType.INTEGER.toBytes(3));
+            put.add(cfB, c1, ts + 6, PDataType.INTEGER.toBytes(3));
+            put.add(cfC, c2, ts + 6, PDataType.LONG.toBytes(10));
+            put.add(cfC, c3, ts + 6, PDataType.VARCHAR.toBytes("abcd"));
+            htable.put(put);
+
+            conn2.close();
+
+            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 10));
+            Connection conn7 = DriverManager.getConnection(PHOENIX_JDBC_URL, props);
+            String select = "SELECT id, b.col1 FROM " + MDTEST_NAME + " WHERE c.col2=?";
+            PreparedStatement ps = conn7.prepareStatement(select);
+            ps.setInt(1, 10);
+            ResultSet rs = ps.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(2, rs.getInt(1));
+            assertEquals(0, rs.getInt(2));
+            assertTrue(rs.wasNull());
+            assertTrue(rs.next());
+            assertEquals(3, rs.getInt(1));
+            assertEquals(3, rs.getInt(2));
+            assertFalse(rs.next());
+
+            // Select contains only CF wildcards
+            select = "SELECT b.* FROM " + MDTEST_NAME + " WHERE c.col2=?";
+            ps = conn7.prepareStatement(select);
+            ps.setInt(1, 10);
+            rs = ps.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(0, rs.getInt(1));
+            assertTrue(rs.wasNull());
+            assertTrue(rs.next());
+            assertEquals(3, rs.getInt(1));
+            assertFalse(rs.next());
+
+            select = "SELECT b.* FROM " + MDTEST_NAME;
+            ps = conn7.prepareStatement(select);
+            rs = ps.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(1, rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(0, rs.getInt(1));
+            assertTrue(rs.wasNull());
+            assertTrue(rs.next());
+            assertEquals(3, rs.getInt(1));
+            assertFalse(rs.next());
+        } finally {
+            if (htable != null) htable.close();
+            admin.disableTable(htableName);
+            admin.deleteTable(htableName);
+            admin.close();
+        }
+    }
+}


[3/8] git commit: PHOENIX-29

Posted by an...@apache.org.
PHOENIX-29


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/9f936749
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/9f936749
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/9f936749

Branch: refs/heads/master
Commit: 9f936749b89d06dcc966f94c73dd4da0aa2bef96
Parents: e6de665
Author: anoopsjohn <an...@gmail.com>
Authored: Mon Feb 24 01:12:32 2014 +0530
Committer: anoopsjohn <an...@gmail.com>
Committed: Mon Feb 24 01:12:32 2014 +0530

----------------------------------------------------------------------
 .../apache/phoenix/compile/WhereCompiler.java   | 39 +++++++++-----------
 1 file changed, 17 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/9f936749/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
index 86e4b82..956d442 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
@@ -26,30 +26,13 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
-import org.apache.phoenix.expression.Expression;
-import org.apache.phoenix.expression.KeyValueColumnExpression;
-import org.apache.phoenix.expression.LiteralExpression;
+import org.apache.phoenix.expression.*;
 import org.apache.phoenix.expression.visitor.KeyValueExpressionVisitor;
-import org.apache.phoenix.filter.MultiCFCQKeyValueComparisonFilter;
-import org.apache.phoenix.filter.MultiCQKeyValueComparisonFilter;
-import org.apache.phoenix.filter.RowKeyComparisonFilter;
-import org.apache.phoenix.filter.SingleCFCQKeyValueComparisonFilter;
-import org.apache.phoenix.filter.SingleCQKeyValueComparisonFilter;
-import org.apache.phoenix.parse.ColumnParseNode;
-import org.apache.phoenix.parse.FilterableStatement;
+import org.apache.phoenix.filter.*;
+import org.apache.phoenix.parse.*;
 import org.apache.phoenix.parse.HintNode.Hint;
-import org.apache.phoenix.parse.ParseNode;
-import org.apache.phoenix.parse.ParseNodeFactory;
-import org.apache.phoenix.schema.AmbiguousColumnException;
-import org.apache.phoenix.schema.ColumnNotFoundException;
-import org.apache.phoenix.schema.ColumnRef;
-import org.apache.phoenix.schema.PDataType;
-import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.PTableType;
-import org.apache.phoenix.schema.TypeMismatchException;
-import org.apache.phoenix.util.ByteUtil;
-import org.apache.phoenix.util.ScanUtil;
-import org.apache.phoenix.util.SchemaUtil;
+import org.apache.phoenix.schema.*;
+import org.apache.phoenix.util.*;
 
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Sets;
@@ -112,6 +95,18 @@ public class WhereCompiler {
         }
 
         @Override
+        public Expression visit(ColumnParseNode node) throws SQLException {
+            ColumnRef ref = resolveColumn(node);
+            TableRef tableRef = ref.getTableRef();
+            if (tableRef.equals(context.getCurrentTable()) && !SchemaUtil.isPKColumn(ref.getColumn())) {
+                // Track the WHERE clause columns; later we must ensure the Scan on the region server includes these CFs.
+                context.addWhereCoditionColumn(ref.getColumn().getFamilyName().getBytes(), ref.getColumn().getName()
+                        .getBytes());
+            }
+            return ref.newColumnExpression();
+        }
+
+        @Override
         protected ColumnRef resolveColumn(ColumnParseNode node) throws SQLException {
             ColumnRef ref = super.resolveColumn(node);
             PTable table = ref.getTable();


[4/8] git commit: PHOENIX-29

Posted by an...@apache.org.
PHOENIX-29


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/51f8a00d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/51f8a00d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/51f8a00d

Branch: refs/heads/master
Commit: 51f8a00dbdb011b5279f82d1a3f18399008eab3c
Parents: 9f93674
Author: anoopsjohn <an...@gmail.com>
Authored: Mon Feb 24 01:12:53 2014 +0530
Committer: anoopsjohn <an...@gmail.com>
Committed: Mon Feb 24 01:12:53 2014 +0530

----------------------------------------------------------------------
 .../phoenix/filter/ColumnProjectionFilter.java  | 126 +++++++++++++++++++
 1 file changed, 126 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/51f8a00d/phoenix-core/src/main/java/org/apache/phoenix/filter/ColumnProjectionFilter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/filter/ColumnProjectionFilter.java b/phoenix-core/src/main/java/org/apache/phoenix/filter/ColumnProjectionFilter.java
new file mode 100644
index 0000000..bf9bc7e
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/filter/ColumnProjectionFilter.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.filter;
+
+import java.io.*;
+import java.util.*;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.filter.FilterBase;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
+import org.apache.phoenix.query.QueryConstants;
+
+/**
+ * When specific columns are selected in a SELECT query, this filter passes only the selected columns back to the client.
+ *
+ * @since 3.0
+ */
+public class ColumnProjectionFilter extends FilterBase {
+
+    private byte[] emptyCFName;
+    private Map<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>> columnsTracker;
+
+    public ColumnProjectionFilter() {
+
+    }
+
+    public ColumnProjectionFilter(byte[] emptyCFName,
+            Map<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>> columnsTracker) {
+        this.emptyCFName = emptyCFName;
+        this.columnsTracker = columnsTracker;
+    }
+
+    @Override
+    public void readFields(DataInput input) throws IOException {
+        this.emptyCFName = WritableUtils.readCompressedByteArray(input);
+        int familyMapSize = WritableUtils.readVInt(input);
+        assert familyMapSize > 0;
+        columnsTracker = new TreeMap<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>>();
+        while (familyMapSize > 0) {
+            byte[] cf = WritableUtils.readCompressedByteArray(input);
+            int qualifiersSize = WritableUtils.readVInt(input);
+            NavigableSet<ImmutableBytesPtr> qualifiers = null;
+            if (qualifiersSize > 0) {
+                qualifiers = new TreeSet<ImmutableBytesPtr>();
+                while (qualifiersSize > 0) {
+                    qualifiers.add(new ImmutableBytesPtr(WritableUtils.readCompressedByteArray(input)));
+                    qualifiersSize--;
+                }
+            }
+            columnsTracker.put(new ImmutableBytesPtr(cf), qualifiers);
+            familyMapSize--;
+        }
+    }
+
+    @Override
+    public void write(DataOutput output) throws IOException {
+        WritableUtils.writeCompressedByteArray(output, this.emptyCFName);
+        WritableUtils.writeVInt(output, this.columnsTracker.size());
+        for (Entry<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>> entry : this.columnsTracker.entrySet()) {
+            // write family name
+            WritableUtils.writeCompressedByteArray(output, entry.getKey().copyBytes());
+            int qualsSize = entry.getValue() == null ? 0 : entry.getValue().size();
+            WritableUtils.writeVInt(output, qualsSize);
+            if (qualsSize > 0) {
+                for (ImmutableBytesPtr cq : entry.getValue()) {
+                    // write qualifier name
+                    WritableUtils.writeCompressedByteArray(output, cq.copyBytes());
+                }
+            }
+        }
+    }
+
+    @Override
+    public void filterRow(List<KeyValue> kvs) {
+        if (kvs.isEmpty()) return;
+        KeyValue firstKV = kvs.get(0);
+        Iterator<KeyValue> itr = kvs.iterator();
+        while (itr.hasNext()) {
+            KeyValue kv = itr.next();
+            ImmutableBytesPtr f = new ImmutableBytesPtr(kv.getBuffer(), kv.getFamilyOffset(), kv.getFamilyLength());
+            if (this.columnsTracker.containsKey(f)) {
+                Set<ImmutableBytesPtr> cols = this.columnsTracker.get(f);
+                ImmutableBytesPtr q = new ImmutableBytesPtr(kv.getBuffer(), kv.getQualifierOffset(),
+                        kv.getQualifierLength());
+                if (cols != null && !(cols.contains(q))) {
+                    itr.remove();
+                }
+            } else {
+                itr.remove();
+            }
+        }
+        if (kvs.isEmpty()) {
+            kvs.add(new KeyValue(firstKV.getBuffer(), firstKV.getRowOffset(), firstKV.getRowLength(), this.emptyCFName,
+                    0, this.emptyCFName.length, QueryConstants.EMPTY_COLUMN_BYTES, 0,
+                    QueryConstants.EMPTY_COLUMN_BYTES.length, HConstants.LATEST_TIMESTAMP, Type.Maximum, null, 0, 0));
+        }
+    }
+
+    @Override
+    public boolean hasFilterRow() {
+        return true;
+    }
+
+    @Override
+    public String toString() {
+        return "";
+    }
+}
\ No newline at end of file
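
A sketch of wiring the new filter onto a scan, as the ParallelIterators change above does. The family and qualifier names are illustrative, and Bytes.toBytes("0") merely stands in for SchemaUtil.getEmptyColumnFamily(table):

    import java.util.NavigableSet;
    import java.util.TreeMap;
    import java.util.TreeSet;

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.phoenix.filter.ColumnProjectionFilter;
    import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;

    public class ProjectionFilterUsageSketch {
        public static void main(String[] args) {
            TreeMap<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>> tracker =
                    new TreeMap<ImmutableBytesPtr, NavigableSet<ImmutableBytesPtr>>();
            // Return only c:COL2 ...
            TreeSet<ImmutableBytesPtr> cQuals = new TreeSet<ImmutableBytesPtr>();
            cQuals.add(new ImmutableBytesPtr(Bytes.toBytes("COL2")));
            tracker.put(new ImmutableBytesPtr(Bytes.toBytes("c")), cQuals);
            // ... but all of family b (a null set means the whole family).
            tracker.put(new ImmutableBytesPtr(Bytes.toBytes("b")), null);

            // Scan whole families so the region server sees every cell; the filter
            // then prunes non-projected cells before rows go back to the client.
            Scan scan = new Scan();
            scan.addFamily(Bytes.toBytes("b"));
            scan.addFamily(Bytes.toBytes("c"));
            scan.setFilter(new ColumnProjectionFilter(Bytes.toBytes("0"), tracker));
        }
    }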


[5/8] git commit: PHOENIX-29

Posted by an...@apache.org.
PHOENIX-29


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/6ba276be
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/6ba276be
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/6ba276be

Branch: refs/heads/master
Commit: 6ba276be0169e08f0894940651a4f990c7df4c58
Parents: 51f8a00
Author: anoopsjohn <an...@gmail.com>
Authored: Mon Feb 24 01:13:12 2014 +0530
Committer: anoopsjohn <an...@gmail.com>
Committed: Mon Feb 24 01:13:12 2014 +0530

----------------------------------------------------------------------
 .../apache/phoenix/iterate/ExplainTable.java    | 33 ++++++++------------
 1 file changed, 13 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/6ba276be/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java
index d72f124..6075007 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java
@@ -18,27 +18,17 @@
 package org.apache.phoenix.iterate;
 
 import java.text.Format;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
+import java.util.*;
 
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.FilterList;
-import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
-import org.apache.hadoop.hbase.filter.PageFilter;
+import org.apache.hadoop.hbase.filter.*;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
-import org.apache.phoenix.compile.ScanRanges;
-import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.compile.*;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.query.KeyRange.Bound;
-import org.apache.phoenix.schema.PDataType;
-import org.apache.phoenix.schema.RowKeySchema;
-import org.apache.phoenix.schema.SortOrder;
-import org.apache.phoenix.schema.TableRef;
+import org.apache.phoenix.schema.*;
 import org.apache.phoenix.util.StringUtil;
 
 import com.google.common.collect.Iterators;
@@ -116,9 +106,7 @@ public abstract class ExplainTable {
                     }
                     if (filterList.size() > offset+1) {
                         filterDesc = filterList.get(offset+1).toString();
-                        if (filterList.size() > offset+2) {
-                            pageFilter = (PageFilter) filterList.get(offset+2);
-                        }
+                        pageFilter = getPageFilter(filterList);
                     }
                 }
             } else if (filter instanceof FilterList) {
@@ -129,9 +117,7 @@ public abstract class ExplainTable {
                 }
                 if (filterList.size() > offset) {
                     filterDesc = filterList.get(offset).toString();
-                    if (filterList.size() > offset+1) {
-                        pageFilter = (PageFilter) filterList.get(offset+1);
-                    }
+                    pageFilter = getPageFilter(filterList);
                 }
             } else {
                 if (filter instanceof FirstKeyOnlyFilter) {
@@ -152,6 +138,13 @@ public abstract class ExplainTable {
         groupBy.explain(planSteps);
     }
 
+    private PageFilter getPageFilter(List<Filter> filterList) {
+        for (Filter filter : filterList) {
+            if (filter instanceof PageFilter) return (PageFilter)filter;
+        }
+        return null;
+    }
+
     private void appendPKColumnValue(StringBuilder buf, byte[] range, Boolean isNull, int slotIndex) {
         if (Boolean.TRUE.equals(isNull)) {
             buf.append("null");
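
This refactor replaces positional casts with a search of the filter list, so the explain output no longer assumes the PageFilter sits at a fixed offset. A minimal sketch of the same lookup, assuming the 0.94-era filter classes; the class and method names are illustrative:

    import java.util.Arrays;

    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
    import org.apache.hadoop.hbase.filter.PageFilter;

    public class PageFilterLookupSketch {
        // Same idea as getPageFilter above: find the PageFilter wherever it sits
        // in the list, instead of casting the element at a fixed index.
        static PageFilter findPageFilter(FilterList list) {
            for (Filter f : list.getFilters()) {
                if (f instanceof PageFilter) return (PageFilter) f;
            }
            return null;
        }

        public static void main(String[] args) {
            FilterList list = new FilterList(Arrays.<Filter>asList(
                    new FirstKeyOnlyFilter(), new PageFilter(100)));
            System.out.println(findPageFilter(list).getPageSize()); // 100
        }
    }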