Posted to commits@hawq.apache.org by od...@apache.org on 2016/10/05 18:29:11 UTC

[01/18] incubator-hawq git commit: HAWQ-1048. Draft implementation.

Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-964 [created] 509aef120


HAWQ-1048. Draft implementation.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/cd186f6f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/cd186f6f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/cd186f6f

Branch: refs/heads/HAWQ-964
Commit: cd186f6fb3ab6bc74833ef5184da03f113ee7995
Parents: 981c0a9
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Fri Sep 16 19:00:50 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Fri Sep 16 19:00:50 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c | 152 ++++++++++++--------------
 src/include/access/pxffilters.h          |   9 +-
 2 files changed, 80 insertions(+), 81 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/cd186f6f/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index 6767735..76e83b4 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -31,7 +31,7 @@
 #include "utils/guc.h"
 #include "utils/lsyscache.h"
 
-static List* pxf_make_filter_list(List* quals);
+static List* pxf_make_expression_items_list(List *quals);
 static void pxf_free_filter(PxfFilterDesc* filter);
 static void pxf_free_filter_list(List *filters);
 static char* pxf_serialize_filter_list(List *filters);
@@ -157,7 +157,7 @@ Oid pxf_supported_types[] =
 };
 
 /*
- * pxf_make_filter_list
+ * pxf_make_expression_items_list
  *
  * Given a scan node qual list, find the filters that are eligible to be used
  * by PXF, construct a PxfFilterDesc list that describes the filter information,
@@ -166,7 +166,7 @@ Oid pxf_supported_types[] =
  * Caller is responsible for pfreeing the returned PxfFilterDesc List.
  */
 static List *
-pxf_make_filter_list(List *quals)
+pxf_make_expression_items_list(List *quals)
 {
 	List			*result = NIL;
 	ListCell		*lc = NULL;
@@ -174,10 +174,6 @@ pxf_make_filter_list(List *quals)
 	if (list_length(quals) == 0)
 		return NIL;
 
-	/*
-	 * Iterate over all implicitly ANDed qualifiers and add the ones
-	 * that are supported for push-down into the result filter list.
-	 */
 	foreach (lc, quals)
 	{
 		Node *node = (Node *) lfirst(lc);
@@ -187,38 +183,19 @@ pxf_make_filter_list(List *quals)
 		{
 			case T_OpExpr:
 			{
-				OpExpr			*expr 	= (OpExpr *) node;
-				PxfFilterDesc	*filter;
-
-				filter = (PxfFilterDesc *) palloc0(sizeof(PxfFilterDesc));
-				elog(DEBUG5, "pxf_make_filter_list: node tag %d (T_OpExpr)", tag);
-
-				if (opexpr_to_pxffilter(expr, filter))
-					result = lappend(result, filter);
-				else
-					pfree(filter);
-
+				result = lappend(result, node);
 				break;
 			}
 			case T_BoolExpr:
 			{
 				BoolExpr	*expr = (BoolExpr *) node;
-				BoolExprType boolType = expr->boolop;
-				elog(DEBUG5, "pxf_make_filter_list: node tag %d (T_BoolExpr), bool node type %d %s",
-						tag, boolType, boolType==AND_EXPR ? "(AND_EXPR)" : "");
-
-				/* only AND_EXPR is supported */
-				if (expr->boolop == AND_EXPR)
-				{
-					List *inner_result = pxf_make_filter_list(expr->args);
-					elog(DEBUG5, "pxf_make_filter_list: inner result size %d", list_length(inner_result));
-					result = list_concat(result, inner_result);
-				}
+				List *inner_result = pxf_make_expression_items_list(expr->args);
+				result = list_concat(result, inner_result);
+				result = lappend(result, node);
 				break;
 			}
 			default:
-				/* expression not supported. ignore */
-				elog(DEBUG5, "pxf_make_filter_list: unsupported node tag %d", tag);
+				elog(DEBUG5, "pxf_make_expression_items_list: unsupported node tag %d", tag);
 				break;
 		}
 	}
@@ -297,19 +274,16 @@ pxf_free_filter_list(List *filters)
  *
  */
 static char *
-pxf_serialize_filter_list(List *filters)
+pxf_serialize_filter_list(List *expressionItems)
 {
 	StringInfo	 resbuf;
-	StringInfo	 curbuf;
 	ListCell	*lc = NULL;
 
-	if (list_length(filters) == 0)
+	if (list_length(expressionItems) == 0)
 		return NULL;
 
 	resbuf = makeStringInfo();
 	initStringInfo(resbuf);
-	curbuf = makeStringInfo();
-	initStringInfo(curbuf);
 
 	/*
 	 * Iterate through the filters in the list and serialize them one after
@@ -317,53 +291,71 @@ pxf_serialize_filter_list(List *filters)
 	 * typical small number of memcpy's this generates overall, there's no
 	 * point in optimizing, better keep it clear.
 	 */
-	foreach (lc, filters)
+	foreach (lc, expressionItems)
 	{
-		PxfFilterDesc		*filter	= (PxfFilterDesc *) lfirst(lc);
-		PxfOperand			 l		= filter->l;
-		PxfOperand			 r 		= filter->r;
-		PxfOperatorCode	 o 		= filter->op;
-
-		/* last result is stored in 'oldbuf'. start 'curbuf' clean */
-		resetStringInfo(curbuf);
-
-		/* format the operands */
-		if (pxfoperand_is_attr(l) && pxfoperand_is_const(r))
-		{
-			appendStringInfo(curbuf, "%c%d%c%s",
-									 PXF_ATTR_CODE, l.attnum - 1, /* Java attrs are 0-based */
-									 PXF_CONST_CODE, (r.conststr)->data);
-		}
-		else if (pxfoperand_is_const(l) && pxfoperand_is_attr(r))
-		{
-			appendStringInfo(curbuf, "%c%s%c%d",
-									 PXF_CONST_CODE, (l.conststr)->data,
-									 PXF_ATTR_CODE, r.attnum - 1); /* Java attrs are 0-based */
-		}
-		else
-		{
-			/* pxf_make_filter_list() should have never let this happen */
-			ereport(ERROR,
-					(errcode(ERRCODE_INTERNAL_ERROR),
-					 errmsg("internal error in pxffilters.c:pxf_serialize_"
-							 "filter_list. Found a non const+attr filter")));
-		}
-
-		/* format the operator */
-		appendStringInfo(curbuf, "%c%d", PXF_OPERATOR_CODE, o);
-
-		/* append this result to the previous result */
-		appendBinaryStringInfo(resbuf, curbuf->data, curbuf->len);
+		Node *node = (Node *) lfirst(lc);
+		NodeTag tag = nodeTag(node);
 
-		/* if there was a previous result, append a trailing AND operator */
-		if(resbuf->len > curbuf->len)
+		switch (tag)
 		{
-			appendStringInfo(resbuf, "%c%d", PXF_OPERATOR_CODE, PXFOP_AND);
+			case T_OpExpr:
+			{
+				PxfFilterDesc *filter = (PxfFilterDesc *) palloc0(sizeof(PxfFilterDesc));
+				OpExpr *expr = (OpExpr *) node;
+				if (opexpr_to_pxffilter(expr, filter))
+				{
+					PxfOperand l = filter->l;
+					PxfOperand r = filter->r;
+					PxfOperatorCode o = filter->op;
+					if (pxfoperand_is_attr(l) && pxfoperand_is_const(r))
+					{
+						appendStringInfo(resbuf, "%c%d%c%s",
+												 PXF_ATTR_CODE, l.attnum - 1, /* Java attrs are 0-based */
+												 PXF_CONST_CODE, (r.conststr)->data);
+					}
+					else if (pxfoperand_is_const(l) && pxfoperand_is_attr(r))
+					{
+						appendStringInfo(resbuf, "%c%s%c%d",
+												 PXF_CONST_CODE, (l.conststr)->data,
+												 PXF_ATTR_CODE, r.attnum - 1); /* Java attrs are 0-based */
+					}
+					else
+					{
+						/* pxf_make_filter_list() should have never let this happen */
+						ereport(ERROR,
+								(errcode(ERRCODE_INTERNAL_ERROR),
+								 errmsg("internal error in pxffilters.c:pxf_serialize_"
+										 "filter_list. Found a non const+attr filter")));
+					}
+					appendStringInfo(resbuf, "%c%d", PXF_OPERATOR_CODE, o);
+				}
+				else
+					pfree(filter);
+				break;
+			}
+			case T_BoolExpr:
+			{
+				BoolExpr *expr = (BoolExpr *) node;
+				BoolExprType boolType = expr->boolop;
+				PxfOperatorCode pxfOperandCode;
+				switch (boolType)
+				{
+					case AND_EXPR:
+						pxfOperandCode = PXFLOP_AND;
+						break;
+					case OR_EXPR:
+						pxfOperandCode = PXFLOP_OR;
+						break;
+					case NOT_EXPR:
+						pxfOperandCode = PXFLOP_NOT;
+						break;
+				}
+				appendStringInfo(resbuf, "%c%d", PXF_LOGICAL_OPERATOR_CODE, pxfOperandCode);
+				break;
+			}
 		}
 	}
 
-	pfree(curbuf->data);
-
 	return resbuf->data;
 }
 
@@ -626,10 +618,10 @@ char *serializePxfFilterQuals(List *quals)
 
 	if (pxf_enable_filter_pushdown)
 	{
-		List *filters = pxf_make_filter_list(quals);
 
-		result  = pxf_serialize_filter_list(filters);
-		pxf_free_filter_list(filters);
+		List *expressionItems = pxf_make_expression_items_list(quals);
+		result  = pxf_serialize_filter_list(expressionItems);
+		//pxf_free_filter_list(expressionItems);
 	}
 	elog(DEBUG2, "serializePxfFilterQuals: filter result: %s", (result == NULL) ? "null" : result);
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/cd186f6f/src/include/access/pxffilters.h
----------------------------------------------------------------------
diff --git a/src/include/access/pxffilters.h b/src/include/access/pxffilters.h
index f54c47c..3e80944 100644
--- a/src/include/access/pxffilters.h
+++ b/src/include/access/pxffilters.h
@@ -44,11 +44,17 @@ typedef enum PxfOperatorCode
 	PXFOP_GE,
 	PXFOP_EQ,
 	PXFOP_NE,
-	PXFOP_AND,
 	PXFOP_LIKE
 
 } PxfOperatorCode;
 
+typedef enum PxfLogicalOperatorCode
+{
+	PXFLOP_AND = 0,
+	PXFLOP_OR,
+	PXFLOP_NOT,
+} PxfLogicalOperatorCode;
+
 /*
  * each supported operand from both sides of the operator is represented
  * by a code that will describe the operator type in the final serialized
@@ -57,6 +63,7 @@ typedef enum PxfOperatorCode
 #define PXF_ATTR_CODE		'a'
 #define PXF_CONST_CODE		'c'
 #define PXF_OPERATOR_CODE	'o'
+#define PXF_LOGICAL_OPERATOR_CODE	'l'
 
 /*
  * An Operand has any of the above codes, and the information specific to

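A note on the format this commit introduces: the header now defines four item codes, 'a' (attribute), 'c' (constant), 'o' (comparison operator) and the new 'l' (logical operator), and the serializer emits them in Reverse Polish order, operands first and the operator that combines them last, so no parentheses are needed. The snippet below is illustrative only and is not part of the commit; it assembles the same filter string that FilterParserTest in [05/18] parses, a1c"first"o5a2c2o2l0, roughly WHERE _1_ = 'first' AND _2_ > 2, using the operator codes those tests exercise (o5 for HDOP_EQ, o2 for HDOP_GT, l0 for AND) and assuming text constants are emitted with surrounding quotes, as in the test strings.

    // Illustrative sketch only; mirrors the order of the appendStringInfo calls above.
    public class FilterStringSketch {
        public static void main(String[] args) {
            StringBuilder buf = new StringBuilder();
            buf.append('a').append(1)            // PXF_ATTR_CODE, attr index (0-based on the Java side)
               .append('c').append("\"first\"")  // PXF_CONST_CODE, quoted text constant (assumed)
               .append('o').append(5);           // PXF_OPERATOR_CODE, HDOP_EQ in FilterParserTest
            buf.append('a').append(2)
               .append('c').append(2)
               .append('o').append(2);           // HDOP_GT in FilterParserTest
            buf.append('l').append(0);           // PXF_LOGICAL_OPERATOR_CODE, PXFLOP_AND / HDOP_AND
            System.out.println(buf);             // a1c"first"o5a2c2o2l0
        }
    }

Because the logical operator comes last, nested expressions need no grouping syntax; the parser rebuilds the tree from a stack, as the FilterParser change in [05/18] below shows.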

[13/18] incubator-hawq git commit: Merge branch 'master' into HAWQ-1048

Posted by od...@apache.org.
Merge branch 'master' into HAWQ-1048


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/9225016b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/9225016b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/9225016b

Branch: refs/heads/HAWQ-964
Commit: 9225016bdff957afb6f4d3d21c65d2daaa454b05
Parents: d187395 dc12e94
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Thu Sep 22 16:46:46 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Thu Sep 22 16:46:46 2016 -0700

----------------------------------------------------------------------
 .travis.yml                                     |    4 +-
 LICENSE                                         |   39 +-
 licenses/LICENSE-json.txt                       |  131 --
 licenses/LICENSE-pexpect-4.2.txt                |   20 +
 licenses/LICENSE-plperl.txt                     |  135 --
 licenses/LICENSE-ppport.txt                     |  135 ++
 licenses/LICENSE-ptyprocess-0.5.1.txt           |   16 +
 src/backend/access/external/pxffilters.c        |   13 +-
 src/backend/access/external/pxfheaders.c        |    9 +-
 src/include/catalog/JSON.pm                     | 2223 ------------------
 src/include/catalog/JSON/PP.pm                  | 2191 -----------------
 src/include/catalog/JSON/PP/Boolean.pm          |   26 -
 src/include/catalog/JSON/PP5005.pm              |  146 --
 src/include/catalog/JSON/PP56.pm                |  198 --
 src/include/catalog/JSON/PP58.pm                |   93 -
 .../ManagementTool/test_hawq_register.cpp       |  104 -
 .../feature/ManagementTool/test_hawq_register.h |   85 +-
 .../test_hawq_register_usage2_case1.cpp         |  207 ++
 .../test_hawq_register_usage2_case2.cpp         |   77 +
 .../test_hawq_register_usage2_case3.cpp         |   77 +
 .../ManagementTool/usage2case1/bucket0_tpl.yml  |   31 +
 .../usage2case1/error_encoding_tpl.yml          |   21 +
 .../usage2case1/includedirectory.yml            |   21 +
 .../usage2case1/larger_eof_tpl.yml              |   21 +
 .../wrong_distributed_policy_tpl.yml            |   21 +
 .../ManagementTool/usage2case2/t_tpl_1.yml      |   31 +
 .../ManagementTool/usage2case2/t_tpl_2.yml      |   33 +
 .../ManagementTool/usage2case2/t_tpl_3.yml      |   21 +
 .../ManagementTool/usage2case2/t_tpl_4.yml      |   23 +
 .../ManagementTool/usage2case2/t_tpl_new_1.yml  |   43 +
 .../ManagementTool/usage2case2/t_tpl_new_2.yml  |   45 +
 .../ManagementTool/usage2case2/t_tpl_new_3.yml  |   23 +
 .../ManagementTool/usage2case2/t_tpl_new_4.yml  |   25 +
 .../ManagementTool/usage2case3/t_tpl_old_1.yml  |   31 +
 .../ManagementTool/usage2case3/t_tpl_old_2.yml  |   33 +
 .../ManagementTool/usage2case3/t_tpl_old_3.yml  |   21 +
 .../ManagementTool/usage2case3/t_tpl_old_4.yml  |   23 +
 tools/bin/Makefile                              |   12 -
 tools/bin/hawqregister                          | 1003 +++++---
 tools/bin/lib/.gitignore                        |    7 -
 40 files changed, 1765 insertions(+), 5653 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/9225016b/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------


[17/18] incubator-hawq git commit: HAWQ-964. Remove old HBaseLogic, typed Partition Comparison

Posted by od...@apache.org.
HAWQ-964. Remove old HBaseLogic, typed Partition Comparison


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/55db7ecd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/55db7ecd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/55db7ecd

Branch: refs/heads/HAWQ-964
Commit: 55db7ecd0c3a7ffc365a1c850b5500053d92e4f4
Parents: 8f73c2b
Author: Kavinder Dhaliwal <ka...@gmail.com>
Authored: Tue Oct 4 15:17:49 2016 -0700
Committer: Kavinder Dhaliwal <ka...@gmail.com>
Committed: Tue Oct 4 15:17:49 2016 -0700

----------------------------------------------------------------------
 .../pxf/plugins/hbase/HBaseFilterBuilder.java   | 13 +---
 .../hawq/pxf/plugins/hive/HiveAccessor.java     | 68 +++++++++++++++++++-
 2 files changed, 68 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/55db7ecd/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
index 29c8686..5ec0652 100644
--- a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
+++ b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
@@ -240,18 +240,7 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
      * Currently, 1, 2 can occur, since no parenthesis are used.
      */
     private Filter handleCompoundOperations(FilterParser.LogicalOperation opId, Filter left, Filter right) {
-        FilterList result;
-
-        if (left instanceof FilterList) {
-            result = (FilterList) left;
-            result.addFilter(right);
-
-            return result;
-        }
-
-        result = new FilterList(logicalOperatorsMap.get(opId), new Filter[] {left, right});
-
-        return result;
+        return new FilterList(logicalOperatorsMap.get(opId), new Filter[] {left, right});
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/55db7ecd/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
index b2b3e4b..8cff706 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
@@ -19,9 +19,12 @@ package org.apache.hawq.pxf.plugins.hive;
  * under the License.
  */
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hawq.pxf.api.BasicFilter;
 import org.apache.hawq.pxf.api.FilterParser;
 import org.apache.hawq.pxf.api.LogicalFilter;
+import org.apache.hawq.pxf.api.UnsupportedTypeException;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;
 import org.apache.hawq.pxf.plugins.hdfs.HdfsSplittableDataAccessor;
@@ -33,10 +36,16 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
 
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
+import static org.apache.hawq.pxf.api.io.DataType.*;
+import static org.apache.hawq.pxf.api.io.DataType.BPCHAR;
+import static org.apache.hawq.pxf.api.io.DataType.BYTEA;
+
 /**
  * Accessor for Hive tables. The accessor will open and read a split belonging
  * to a Hive table. Opening a split means creating the corresponding InputFormat
@@ -247,7 +256,16 @@ public class HiveAccessor extends HdfsSplittableDataAccessor {
                  * the filter field matches a partition field, but the values do
                  * not match
                  */
-                        return filterValue.equals(partition.val);
+                        boolean keepPartition = filterValue.equals(partition.val);
+
+                        /*
+                         * If the string comparison fails then we should check the comparison of
+                         * the two operands as typed values
+                         */
+                        if (!keepPartition && !partition.val.equals("__HIVE_DEFAULT_PARTITION__")){
+                            keepPartition = testFilterByType(filterValue, partition);
+                        }
+                        return keepPartition;
                     }
                 }
 
@@ -261,6 +279,54 @@ public class HiveAccessor extends HdfsSplittableDataAccessor {
         }
         return partitionAllowed;
     }
+
+    /*
+     * Given two values in String form and their type, convert each to the same type do an equality check
+     */
+    private boolean testFilterByType(String filterValue, HivePartition partition) {
+        boolean result;
+        switch (partition.type) {
+            case serdeConstants.BOOLEAN_TYPE_NAME:
+                result = Boolean.valueOf(filterValue).equals(Boolean.valueOf(partition.val));
+                break;
+            case serdeConstants.TINYINT_TYPE_NAME:
+            case serdeConstants.SMALLINT_TYPE_NAME:
+                result = (Short.parseShort(filterValue) == Short.parseShort(partition.val));
+                break;
+            case serdeConstants.INT_TYPE_NAME:
+                result = (Integer.parseInt(filterValue) == Integer.parseInt(partition.val));
+                break;
+            case serdeConstants.BIGINT_TYPE_NAME:
+                result = (Long.parseLong(filterValue) == Long.parseLong(partition.val));
+                break;
+            case serdeConstants.FLOAT_TYPE_NAME:
+                result = (Float.parseFloat(filterValue) == Float.parseFloat(partition.val));
+                break;
+            case serdeConstants.DOUBLE_TYPE_NAME:
+                result = (Double.parseDouble(filterValue) == Double.parseDouble(partition.val));
+                break;
+            case serdeConstants.TIMESTAMP_TYPE_NAME:
+                result = Timestamp.valueOf(filterValue).equals(Timestamp.valueOf(partition.val));
+                break;
+            case serdeConstants.DATE_TYPE_NAME:
+                result = Date.valueOf(filterValue).equals(Date.valueOf(partition.val));
+                break;
+            case serdeConstants.DECIMAL_TYPE_NAME:
+                result = HiveDecimal.create(filterValue).bigDecimalValue().equals(HiveDecimal.create(partition.val).bigDecimalValue());
+                break;
+            case serdeConstants.BINARY_TYPE_NAME:
+                result = filterValue.getBytes().equals(partition.val.getBytes());
+                break;
+            case serdeConstants.STRING_TYPE_NAME:
+            case serdeConstants.VARCHAR_TYPE_NAME:
+            case serdeConstants.CHAR_TYPE_NAME:
+            default:
+               result = false;
+        }
+
+        return result;
+    }
+
     /*
      * We are testing one filter against all the partition fields. The filter
      * has the form "fieldA = valueA". The partitions have the form

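One note on the typed re-check added to HiveAccessor above: a plain string comparison can reject a partition whose value is semantically equal to the filter constant, for example "10.0" versus "10.00" on a DOUBLE partition column. The sketch below is self-contained and uses hypothetical values; it only illustrates the keepPartition flow that testFilterByType backs.

    // Hypothetical values; shows why the typed comparison is needed for a DOUBLE partition.
    public class TypedPartitionCheckSketch {
        public static void main(String[] args) {
            String filterValue  = "10.0";    // constant from the pushed-down filter
            String partitionVal = "10.00";   // value stored for the Hive partition

            boolean keepPartition = filterValue.equals(partitionVal);   // false as strings
            if (!keepPartition && !partitionVal.equals("__HIVE_DEFAULT_PARTITION__")) {
                // same comparison as typed values, as the DOUBLE_TYPE_NAME branch above does
                keepPartition = Double.parseDouble(filterValue) == Double.parseDouble(partitionVal);
            }
            System.out.println(keepPartition);   // true: the partition is kept
        }
    }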

[05/18] incubator-hawq git commit: HAWQ-964. Support for OR and NOT Logical Operators

Posted by od...@apache.org.
HAWQ-964. Support for OR and NOT Logical Operators

Signed-off-by: Leslie Chang <hi...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/726be6cc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/726be6cc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/726be6cc

Branch: refs/heads/HAWQ-964
Commit: 726be6cced0edd67fd0a0308d0483ee54b8ac116
Parents: e6c7fda
Author: Kavinder Dhaliwal <ka...@gmail.com>
Authored: Thu Sep 15 10:56:20 2016 -0700
Committer: Kavinder Dhaliwal <ka...@gmail.com>
Committed: Tue Sep 20 09:45:44 2016 -0700

----------------------------------------------------------------------
 .../org/apache/hawq/pxf/api/FilterParser.java   |  71 ++++++------
 .../apache/hawq/pxf/api/FilterParserTest.java   | 115 +++++++++++++++++++
 .../pxf/plugins/hbase/HBaseFilterBuilder.java   |  52 +++++----
 .../hawq/pxf/plugins/hive/HiveAccessor.java     |   5 +-
 .../pxf/plugins/hive/HiveDataFragmenter.java    |   3 +-
 .../pxf/plugins/hive/HiveFilterBuilder.java     |  80 ++++++-------
 .../hawq/pxf/plugins/hive/HiveORCAccessor.java  |  39 +++++--
 .../pxf/plugins/hive/HiveFilterBuilderTest.java |  46 +++++---
 .../hive/HiveORCSearchArgumentExample.java      |  84 ++++++++++++++
 9 files changed, 370 insertions(+), 125 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/726be6cc/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java
index 00fbf2b..22c76a6 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java
@@ -52,6 +52,7 @@ public class FilterParser {
     private FilterBuilder filterBuilder;
 
     private static Map<Integer, Operation> operatorTranslationMap = initOperatorTransMap();
+    private static Map<Integer, LogicalOperation> logicalOperationTranslationMap = initLogicalOperatorTransMap();
 
     /** Supported operations by the parser. */
     public enum Operation {
@@ -65,6 +66,12 @@ public class FilterParser {
         HDOP_LIKE
     }
 
+    public enum LogicalOperation {
+        HDOP_AND,
+        HDOP_OR,
+        HDOP_NOT
+    }
+
     /**
      * Interface a user of FilterParser should implement.
      * This is used to let the user build filter expressions in the manner she sees fit.
@@ -81,6 +88,8 @@ public class FilterParser {
          * @throws Exception if building the filter failed
          */
         public Object build(Operation operation, Object left, Object right) throws Exception;
+        public Object build(LogicalOperation operation, Object left, Object right) throws Exception;
+        public Object build(LogicalOperation operation, Object filter) throws Exception;
     }
 
     /** Represents a column index. */
@@ -110,42 +119,6 @@ public class FilterParser {
     }
 
     /**
-     * Basic filter provided for cases where the target storage system does not provide it own filter
-     * For example: Hbase storage provides its own filter but for a Writable based record in a
-     * SequenceFile there is no filter provided and so we need to have a default
-     */
-    static public class BasicFilter {
-        private Operation oper;
-        private ColumnIndex column;
-        private Constant constant;
-
-        /**
-         * Constructs a BasicFilter.
-         *
-         * @param oper the parse operation to perform
-         * @param column the column index
-         * @param constant the constant object
-         */
-        public BasicFilter(Operation oper, ColumnIndex column, Constant constant) {
-            this.oper = oper;
-            this.column = column;
-            this.constant = constant;
-        }
-
-        public Operation getOperation() {
-            return oper;
-        }
-
-        public ColumnIndex getColumn() {
-            return column;
-        }
-
-        public Constant getConstant() {
-            return constant;
-        }
-    }
-
-    /**
      * Thrown when a filter's parsing exception occurs.
      */
     @SuppressWarnings("serial")
@@ -220,6 +193,24 @@ public class FilterParser {
                     // Store result on stack
                     operandsStack.push(result);
                     break;
+                case 'l':
+                    LogicalOperation logicalOperation = logicalOperationTranslationMap.get(safeToInt(parseNumber()));
+
+                    if (logicalOperation == null) {
+                        throw new FilterStringSyntaxException("unknown op ending at " + index);
+                    }
+
+                    if (logicalOperation == LogicalOperation.HDOP_NOT) {
+                        Object exp = operandsStack.pop();
+                        result = filterBuilder.build(logicalOperation, exp);
+                    } else {
+                        rightOperand  = operandsStack.pop();
+                        leftOperand = operandsStack.pop();
+
+                        result = filterBuilder.build(logicalOperation, leftOperand, rightOperand);
+                    }
+                    operandsStack.push(result);
+                    break;
                 default:
                     index--; // move index back to operand location
                     throw new FilterStringSyntaxException("unknown opcode " + op +
@@ -396,4 +387,12 @@ public class FilterParser {
         operatorTranslationMap.put(8, Operation.HDOP_LIKE);
         return operatorTranslationMap;
     }
+
+    static private Map<Integer, LogicalOperation> initLogicalOperatorTransMap() {
+        Map<Integer, LogicalOperation> integerLogicalOperationMap = new HashMap<>();
+        integerLogicalOperationMap.put(0, LogicalOperation.HDOP_AND);
+        integerLogicalOperationMap.put(1, LogicalOperation.HDOP_OR);
+        integerLogicalOperationMap.put(2, LogicalOperation.HDOP_NOT);
+        return integerLogicalOperationMap;
+    }
 }
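
The new 'l' case above follows the same stack discipline as the comparison operators: AND and OR pop two operands, NOT pops one, and the built filter is pushed back, so a string like a1c"first"o5a2c2o2l0l2 (see parseLogicalOperatorNotExpression in the test diff below) collapses to NOT(AND(...)). A minimal, dependency-free sketch of that collapse, with the already-built comparisons stood in by strings:

    import java.util.ArrayDeque;
    import java.util.Deque;

    // Sketch only: the operand stack after the two comparisons have been parsed,
    // then the effect of "l0" (HDOP_AND, binary) and "l2" (HDOP_NOT, unary).
    public class LogicalOpStackSketch {
        public static void main(String[] args) {
            Deque<String> operands = new ArrayDeque<>();
            operands.push("EQ(_1_, 'first')");   // result of a1c"first"o5
            operands.push("GT(_2_, 2)");         // result of a2c2o2

            String right = operands.pop();       // l0: HDOP_AND pops two operands...
            String left  = operands.pop();
            operands.push("AND(" + left + ", " + right + ")");   // ...and pushes the result

            operands.push("NOT(" + operands.pop() + ")");        // l2: HDOP_NOT pops one

            System.out.println(operands.pop());  // NOT(AND(EQ(_1_, 'first'), GT(_2_, 2)))
        }
    }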

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/726be6cc/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
index 1ded4a3..83bb2dc 100644
--- a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
@@ -22,13 +22,17 @@ package org.apache.hawq.pxf.api;
 
 import org.apache.hawq.pxf.api.FilterParser.FilterBuilder;
 import org.apache.hawq.pxf.api.FilterParser.Operation;
+import org.apache.hawq.pxf.api.FilterParser.LogicalOperation;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.eq;
@@ -282,6 +286,117 @@ public class FilterParserTest {
         assertEquals(lastOp, result);
     }
 
+    @Test
+    public void parseLogicalAndOperator() throws Exception {
+        filter = "l0";
+        Object op = "filter with 1 AND operator";
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
+                any(),
+                any())).thenReturn(op);
+
+        Object result = filterParser.parse(filter);
+
+        assertEquals(op, result);
+    }
+
+    @Test
+    public void parseLogicalOrOperator() throws Exception {
+        filter = "l1";
+
+        Object op = "filter with 1 OR operator";
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_OR),
+                any(),
+                any())).thenReturn(op);
+
+        Object result = filterParser.parse(filter);
+        assertEquals(op, result);
+    }
+
+    @Test
+    public void parseLogicalNotOperator() throws Exception {
+        filter = "l2";
+
+        Object op = "filter with NOT operator";
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_NOT),
+                any(),
+                any())).thenReturn(op);
+
+        Object result = filterParser.parse(filter);
+        assertEquals(op, result);
+    }
+
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();
+    @Test
+    public void parseLogicalUnknownCodeError() throws Exception {
+        thrown.expect(FilterParser.FilterStringSyntaxException.class);
+        thrown.expectMessage("unknown op ending at 2");
+
+        filter = "l7";
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
+                any(),
+                any())).thenReturn(null);
+
+        Object result = filterParser.parse(filter);
+    }
+
+    @Test
+    public void parseLogicalOperatorWithExpressions() throws Exception {
+        filter = "a1c\"first\"o5a2c2o2l0";
+        Object firstOp = "first operation HDOP_EQ";
+        Object secondOp = "second operation HDOP_GT";
+        Object lastOp = "filter with 2 operations connected by AND";
+
+        when(filterBuilder.build(eq(Operation.HDOP_EQ),
+                any(),
+                any())).thenReturn(firstOp);
+
+
+        when(filterBuilder.build(eq(Operation.HDOP_GT),
+                any(),
+                any())).thenReturn(secondOp);
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
+                any(),
+                any())).thenReturn(lastOp);
+
+
+        Object result = filterParser.parse(filter);
+        assertEquals(lastOp, result);
+    }
+
+    @Test
+    public void parseLogicalOperatorNotExpression() throws Exception {
+        filter = "a1c\"first\"o5a2c2o2l0l2";
+        Object firstOp = "first operation HDOP_EQ";
+        Object secondOp = "second operation HDOP_GT";
+        Object thirdOp = "filter with 2 operations connected by AND";
+        Object lastOp = "filter with 1 NOT operation";
+
+        when(filterBuilder.build(eq(Operation.HDOP_EQ),
+                any(),
+                any())).thenReturn(firstOp);
+
+
+        when(filterBuilder.build(eq(Operation.HDOP_GT),
+                any(),
+                any())).thenReturn(secondOp);
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
+                any(),
+                any())).thenReturn(thirdOp);
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_NOT),
+                any())).thenReturn(lastOp);
+
+        Object result = filterParser.parse(filter);
+        assertEquals(lastOp, result);
+    }
+
+
 	/*
      * Helper functions
 	 */

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/726be6cc/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
index 8eadc88..29c8686 100644
--- a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
+++ b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
@@ -52,12 +52,15 @@ import static org.apache.hawq.pxf.api.io.DataType.TEXT;
  */
 public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
     private Map<FilterParser.Operation, CompareFilter.CompareOp> operatorsMap;
+    private Map<FilterParser.LogicalOperation, FilterList.Operator> logicalOperatorsMap;
     private byte[] startKey;
     private byte[] endKey;
     private HBaseTupleDescription tupleDescription;
+    private static final String NOT_OP = "l2";
 
     public HBaseFilterBuilder(HBaseTupleDescription tupleDescription) {
         initOperatorsMap();
+        initLogicalOperatorsMap();
         startKey = HConstants.EMPTY_START_ROW;
         endKey = HConstants.EMPTY_END_ROW;
         this.tupleDescription = tupleDescription;
@@ -71,14 +74,19 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
      * @throws Exception if parsing failed
      */
     public Filter getFilterObject(String filterString) throws Exception {
-        FilterParser parser = new FilterParser(this);
-        Object result = parser.parse(filterString);
+        // First check for NOT, HBase does not support this
+        if (filterString.contains(NOT_OP)) {
+            return null;
+        } else {
+            FilterParser parser = new FilterParser(this);
+            Object result = parser.parse(filterString);
+
+            if (!(result instanceof Filter)) {
+                throw new Exception("String " + filterString + " resolved to no filter");
+            }
 
-        if (!(result instanceof Filter)) {
-            throw new Exception("String " + filterString + " resolved to no filter");
+            return (Filter) result;
         }
-
-        return (Filter) result;
     }
 
     /**
@@ -122,18 +130,6 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
     public Object build(FilterParser.Operation opId,
                         Object leftOperand,
                         Object rightOperand) throws Exception {
-        if (leftOperand instanceof Filter) {
-            if (opId != FilterParser.Operation.HDOP_AND ||
-                    !(rightOperand instanceof Filter)) {
-                throw new Exception("Only AND is allowed between compound expressions");
-            }
-
-            return handleCompoundOperations((Filter) leftOperand, (Filter) rightOperand);
-        }
-
-        if (!(rightOperand instanceof FilterParser.Constant)) {
-            throw new Exception("expressions of column-op-column are not supported");
-        }
 
         // Assume column is on the left
         return handleSimpleOperations(opId,
@@ -141,6 +137,16 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
                 (FilterParser.Constant) rightOperand);
     }
 
+    @Override
+    public Object build(FilterParser.LogicalOperation opId, Object leftOperand, Object rightOperand) {
+        return handleCompoundOperations(opId, (Filter) leftOperand, (Filter) rightOperand);
+    }
+
+    @Override
+    public Object build(FilterParser.LogicalOperation opId, Object leftOperand) {
+        return null;
+    }
+
     /**
      * Initializes the {@link #operatorsMap} with appropriate values.
      */
@@ -154,6 +160,12 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
         operatorsMap.put(FilterParser.Operation.HDOP_NE, CompareFilter.CompareOp.NOT_EQUAL); // "!="
     }
 
+    private void initLogicalOperatorsMap() {
+        logicalOperatorsMap = new HashMap<>();
+        logicalOperatorsMap.put(FilterParser.LogicalOperation.HDOP_AND, FilterList.Operator.MUST_PASS_ALL);
+        logicalOperatorsMap.put(FilterParser.LogicalOperation.HDOP_OR, FilterList.Operator.MUST_PASS_ONE);
+    }
+
     /**
      * Handles simple column-operator-constant expressions.
      * Creates a special filter in the case the column is the row key column.
@@ -227,7 +239,7 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
      * <p>
      * Currently, 1, 2 can occur, since no parenthesis are used.
      */
-    private Filter handleCompoundOperations(Filter left, Filter right) {
+    private Filter handleCompoundOperations(FilterParser.LogicalOperation opId, Filter left, Filter right) {
         FilterList result;
 
         if (left instanceof FilterList) {
@@ -237,7 +249,7 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
             return result;
         }
 
-        result = new FilterList(FilterList.Operator.MUST_PASS_ALL, new Filter[] {left, right});
+        result = new FilterList(logicalOperatorsMap.get(opId), new Filter[] {left, right});
 
         return result;
     }
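
On the HBase side, handleCompoundOperations now receives the parsed LogicalOperation and looks the FilterList operator up in logicalOperatorsMap (HDOP_AND to MUST_PASS_ALL, HDOP_OR to MUST_PASS_ONE); NOT has no FilterList equivalent, which is why getFilterObject returns null as soon as the filter string contains "l2". The sketch below is not from the commit and uses hypothetical row-key filters; it only shows what an OR of two comparisons becomes under that mapping.

    import org.apache.hadoop.hbase.filter.BinaryComparator;
    import org.apache.hadoop.hbase.filter.CompareFilter;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.RowFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    // Sketch only: two hypothetical row-key comparisons combined with HDOP_OR,
    // which logicalOperatorsMap translates to FilterList.Operator.MUST_PASS_ONE.
    public class HBaseOrFilterSketch {
        public static void main(String[] args) {
            Filter left  = new RowFilter(CompareFilter.CompareOp.EQUAL,
                                         new BinaryComparator(Bytes.toBytes("row-1")));
            Filter right = new RowFilter(CompareFilter.CompareOp.GREATER,
                                         new BinaryComparator(Bytes.toBytes("row-9")));

            // equivalent of handleCompoundOperations(HDOP_OR, left, right)
            FilterList orFilter = new FilterList(FilterList.Operator.MUST_PASS_ONE,
                                                 new Filter[] {left, right});
            System.out.println(orFilter);
        }
    }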

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/726be6cc/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
index ab40b3c..20a1b9f 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
@@ -19,6 +19,7 @@ package org.apache.hawq.pxf.plugins.hive;
  * under the License.
  */
 
+import org.apache.hawq.pxf.api.BasicFilter;
 import org.apache.hawq.pxf.api.FilterParser;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;
@@ -219,7 +220,7 @@ public class HiveAccessor extends HdfsSplittableDataAccessor {
     private boolean testOneFilter(List<HivePartition> partitionFields,
                                   Object filter, InputData input) {
         // Let's look first at the filter
-        FilterParser.BasicFilter bFilter = (FilterParser.BasicFilter) filter;
+        BasicFilter bFilter = (BasicFilter) filter;
 
         boolean isFilterOperationEqual = (bFilter.getOperation() == FilterParser.Operation.HDOP_EQ);
         if (!isFilterOperationEqual) /*
@@ -253,7 +254,7 @@ public class HiveAccessor extends HdfsSplittableDataAccessor {
     }
 
     private void printOneBasicFilter(Object filter) {
-        FilterParser.BasicFilter bFilter = (FilterParser.BasicFilter) filter;
+        BasicFilter bFilter = (BasicFilter) filter;
         boolean isOperationEqual = (bFilter.getOperation() == FilterParser.Operation.HDOP_EQ);
         int columnIndex = bFilter.getColumn().index();
         String value = bFilter.getConstant().constant().toString();

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/726be6cc/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
index 2fe31cd..8446905 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hawq.pxf.api.BasicFilter;
 import org.apache.hawq.pxf.api.FilterParser;
 import org.apache.hawq.pxf.api.Fragment;
 import org.apache.hawq.pxf.api.Fragmenter;
@@ -433,7 +434,7 @@ public class HiveDataFragmenter extends Fragmenter {
             throws Exception {
 
         // Let's look first at the filter
-        FilterParser.BasicFilter bFilter = (FilterParser.BasicFilter) filter;
+        BasicFilter bFilter = (BasicFilter) filter;
 
         // In case this is not an "equality filter", we ignore this filter (no
         // add to filter list)

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/726be6cc/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java
index da20f74..bd82a3b 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java
@@ -20,17 +20,20 @@ package org.apache.hawq.pxf.plugins.hive;
  */
 
 
+import org.apache.hawq.pxf.api.BasicFilter;
 import org.apache.hawq.pxf.api.FilterParser;
+import org.apache.hawq.pxf.api.LogicalFilter;
 import org.apache.hawq.pxf.api.utilities.InputData;
 
+import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
 /**
  * Uses the filter parser code to build a filter object, either simple - a
- * single {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter} object or a
+ * single {@link BasicFilter} object or a
  * compound - a {@link java.util.List} of
- * {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter} objects.
+ * {@link BasicFilter} objects.
  * {@link org.apache.hawq.pxf.plugins.hive.HiveAccessor} will use the filter for
  * partition filtering.
  */
@@ -47,13 +50,13 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
     }
 
     /**
-     * Translates a filterString into a {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter} or a
+     * Translates a filterString into a {@link BasicFilter} or a
      * list of such filters.
      *
      * @param filterString the string representation of the filter
-     * @return a single {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter}
+     * @return a single {@link BasicFilter}
      *         object or a {@link java.util.List} of
-     *         {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter} objects.
+     *         {@link BasicFilter} objects.
      * @throws Exception if parsing the filter failed or filter is not a basic
      *             filter or list of basic filters
      */
@@ -61,7 +64,7 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
         FilterParser parser = new FilterParser(this);
         Object result = parser.parse(filterString);
 
-        if (!(result instanceof FilterParser.BasicFilter)
+        if (!(result instanceof LogicalFilter) && !(result instanceof BasicFilter)
                 && !(result instanceof List)) {
             throw new Exception("String " + filterString
                     + " resolved to no filter");
@@ -71,33 +74,19 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
     }
 
     @Override
+    public Object build(FilterParser.LogicalOperation op, Object leftOperand, Object rightOperand) {
+        return handleLogicalOperation(op, leftOperand, rightOperand);
+    }
+
+    @Override
+    public Object build(FilterParser.LogicalOperation op, Object filter) {
+        return handleLogicalOperation(op, filter);
+    }
+
+    @Override
     @SuppressWarnings("unchecked")
     public Object build(FilterParser.Operation opId, Object leftOperand,
                         Object rightOperand) throws Exception {
-        if (leftOperand instanceof FilterParser.BasicFilter
-                || leftOperand instanceof List) {
-            if (opId != FilterParser.Operation.HDOP_AND
-                    || !(rightOperand instanceof FilterParser.BasicFilter)) {
-                throw new Exception(
-                        "Only AND is allowed between compound expressions");
-            }
-
-            if (leftOperand instanceof List) {
-                return handleCompoundOperations(
-                        (List<FilterParser.BasicFilter>) leftOperand,
-                        (FilterParser.BasicFilter) rightOperand);
-            } else {
-                return handleCompoundOperations(
-                        (FilterParser.BasicFilter) leftOperand,
-                        (FilterParser.BasicFilter) rightOperand);
-            }
-        }
-
-        if (!(rightOperand instanceof FilterParser.Constant)) {
-            throw new Exception(
-                    "expressions of column-op-column are not supported");
-        }
-
         // Assume column is on the left
         return handleSimpleOperations(opId,
                 (FilterParser.ColumnIndex) leftOperand,
@@ -108,10 +97,10 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
      * Handles simple column-operator-constant expressions Creates a special
      * filter in the case the column is the row key column
      */
-    private FilterParser.BasicFilter handleSimpleOperations(FilterParser.Operation opId,
-                                                            FilterParser.ColumnIndex column,
-                                                            FilterParser.Constant constant) {
-        return new FilterParser.BasicFilter(opId, column, constant);
+    private BasicFilter handleSimpleOperations(FilterParser.Operation opId,
+                                               FilterParser.ColumnIndex column,
+                                               FilterParser.Constant constant) {
+        return new BasicFilter(opId, column, constant);
     }
 
     /**
@@ -131,19 +120,32 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
      * @param right right hand filter
      * @return list of filters constructing the filter tree
      */
-    private List<FilterParser.BasicFilter> handleCompoundOperations(List<FilterParser.BasicFilter> left,
-                                                                    FilterParser.BasicFilter right) {
+    private List<BasicFilter> handleCompoundOperations(List<BasicFilter> left,
+                                                       BasicFilter right) {
         left.add(right);
         return left;
     }
 
-    private List<FilterParser.BasicFilter> handleCompoundOperations(FilterParser.BasicFilter left,
-                                                                    FilterParser.BasicFilter right) {
-        List<FilterParser.BasicFilter> result = new LinkedList<FilterParser.BasicFilter>();
+    private List<BasicFilter> handleCompoundOperations(BasicFilter left,
+                                                       BasicFilter right) {
+        List<BasicFilter> result = new LinkedList<BasicFilter>();
 
         result.add(left);
         result.add(right);
 
         return result;
     }
+
+    private Object handleLogicalOperation(FilterParser.LogicalOperation operator, Object leftOperand, Object rightOperand) {
+
+        List<Object> result = new LinkedList<>();
+
+        result.add(leftOperand);
+        result.add(rightOperand);
+        return new LogicalFilter(operator, result);
+    }
+
+    private Object handleLogicalOperation(FilterParser.LogicalOperation operator, Object filter) {
+        return new LogicalFilter(operator, Arrays.asList(filter));
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/726be6cc/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
index 23fc66e..195382a 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
@@ -23,11 +23,13 @@ package org.apache.hawq.pxf.plugins.hive;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
-import org.apache.hawq.pxf.api.FilterParser;
+import org.apache.hawq.pxf.api.BasicFilter;
+import org.apache.hawq.pxf.api.LogicalFilter;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;
 import org.apache.commons.lang.StringUtils;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import static org.apache.hawq.pxf.plugins.hive.HiveInputFormatFragmenter.PXF_HIVE_SERDES;
@@ -97,24 +99,37 @@ public class HiveORCAccessor extends HiveAccessor {
         String filterStr = inputData.getFilterString();
         HiveFilterBuilder eval = new HiveFilterBuilder(inputData);
         Object filter = eval.getFilterObject(filterStr);
-
         SearchArgument.Builder filterBuilder = SearchArgumentFactory.newBuilder();
-        filterBuilder.startAnd();
-        if (filter instanceof List) {
-            for (Object f : (List<?>) filter) {
-                buildArgument(filterBuilder, f);
-            }
-        } else {
-            buildArgument(filterBuilder, filter);
-        }
-        filterBuilder.end();
+        buildExpression(filterBuilder, Arrays.asList(filter));
         SearchArgument sarg = filterBuilder.build();
         jobConf.set(SARG_PUSHDOWN, sarg.toKryo());
     }
 
+    private void buildExpression(SearchArgument.Builder builder, List<Object> filterList) {
+        for (Object f : filterList) {
+            if (f instanceof LogicalFilter) {
+                switch(((LogicalFilter) f).getOperator()) {
+                    case HDOP_OR:
+                        builder.startOr();
+                        break;
+                    case HDOP_AND:
+                        builder.startAnd();
+                        break;
+                    case HDOP_NOT:
+                        builder.startNot();
+                        break;
+                }
+                buildExpression(builder, ((LogicalFilter) f).getFilterList());
+                builder.end();
+            } else {
+                buildArgument(builder, f);
+            }
+        }
+    }
+
     private void buildArgument(SearchArgument.Builder builder, Object filterObj) {
         /* The below functions will not be compatible and requires update  with Hive 2.0 APIs */
-        FilterParser.BasicFilter filter = (FilterParser.BasicFilter) filterObj;
+        BasicFilter filter = (BasicFilter) filterObj;
         int filterColumnIndex = filter.getColumn().index();
         Object filterValue = filter.getConstant().constant();
         ColumnDescriptor filterColumn = inputData.getColumn(filterColumnIndex);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/726be6cc/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java
index bfbfaa4..e0e6536 100755
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java
@@ -20,11 +20,12 @@ package org.apache.hawq.pxf.plugins.hive;
  */
 
 
+import org.apache.hawq.pxf.api.FilterParser.LogicalOperation;
+import org.apache.hawq.pxf.api.LogicalFilter;
 import org.junit.Test;
 
-import java.util.List;
+import org.apache.hawq.pxf.api.BasicFilter;
 
-import static org.apache.hawq.pxf.api.FilterParser.BasicFilter;
 import static org.apache.hawq.pxf.api.FilterParser.Operation;
 import static org.apache.hawq.pxf.api.FilterParser.Operation.*;
 import static org.junit.Assert.assertEquals;
@@ -33,18 +34,33 @@ public class HiveFilterBuilderTest {
     @Test
     public void parseFilterWithThreeOperations() throws Exception {
         HiveFilterBuilder builder = new HiveFilterBuilder(null);
-        String[] consts = new String[] {"first", "2", "3"};
-        Operation[] ops = new Operation[] {HDOP_EQ, HDOP_GT, HDOP_LT};
-        int[] idx = new int[] {1, 2, 3};
-
-        @SuppressWarnings("unchecked")
-        List<BasicFilter> filterList = (List) builder.getFilterObject("a1c\"first\"o5a2c2o2o7a3c3o1o7");
-        assertEquals(consts.length, filterList.size());
-        for (int i = 0; i < filterList.size(); i++) {
-            BasicFilter filter = filterList.get(i);
-            assertEquals(filter.getConstant().constant().toString(), consts[i]);
-            assertEquals(filter.getOperation(), ops[i]);
-            assertEquals(filter.getColumn().index(), idx[i]);
-        }
+        String[] consts = new String[] {"first", "2"};
+        Operation[] ops = new Operation[] {HDOP_EQ, HDOP_GT};
+        int[] idx = new int[] {1, 2};
+
+        LogicalFilter filterList = (LogicalFilter) builder.getFilterObject("a1c\"first\"o5a2c2o2l0");
+        assertEquals(LogicalOperation.HDOP_AND, filterList.getOperator());
+        BasicFilter leftOperand = (BasicFilter) filterList.getFilterList().get(0);
+        assertEquals(consts[0], leftOperand.getConstant().constant());
+        assertEquals(idx[0], leftOperand.getColumn().index());
+        assertEquals(ops[0], leftOperand.getOperation());
+    }
+
+    @Test
+    public void parseFilterWithLogicalOperation() throws Exception {
+        HiveFilterBuilder builder = new HiveFilterBuilder(null);
+        LogicalFilter filter = (LogicalFilter) builder.getFilterObject("a1c\"first\"o5a2c2o2l0");
+        assertEquals(LogicalOperation.HDOP_AND, filter.getOperator());
+        assertEquals(2, filter.getFilterList().size());
     }
+
+    @Test
+    public void parseNestedExpressionWithLogicalOperation() throws Exception {
+        HiveFilterBuilder builder = new HiveFilterBuilder(null);
+        LogicalFilter filter = (LogicalFilter) builder.getFilterObject("a1c\"first\"o5a2c2o2l0a1c1o1l1");
+        assertEquals(LogicalOperation.HDOP_OR, filter.getOperator());
+        assertEquals(LogicalOperation.HDOP_AND, ((LogicalFilter) filter.getFilterList().get(0)).getOperator());
+        assertEquals(HDOP_LT, ((BasicFilter) filter.getFilterList().get(1)).getOperation());
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/726be6cc/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCSearchArgumentExample.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCSearchArgumentExample.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCSearchArgumentExample.java
new file mode 100644
index 0000000..a520b94
--- /dev/null
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCSearchArgumentExample.java
@@ -0,0 +1,84 @@
+package org.apache.hawq.pxf.plugins.hive;
+
+import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
+import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
+import org.apache.hawq.pxf.api.BasicFilter;
+import org.apache.hawq.pxf.api.LogicalFilter;
+import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class HiveORCSearchArgumentExample {
+
+    @Test
+    public void buildLogicalOperationTree() throws Exception {
+
+        /* Predicate pushdown configuration */
+        String filterStr = "a2c1o2a3c3o3l0a4c5o1l1";
+        HiveFilterBuilder eval = new HiveFilterBuilder(null);
+        Object filter = eval.getFilterObject(filterStr);
+
+        Object current = filter;
+        SearchArgument.Builder filterBuilder = SearchArgumentFactory.newBuilder();
+        buildExpression(filterBuilder, Arrays.asList(filter));
+        SearchArgument sarg = filterBuilder.build();
+        Assert.assertEquals("and(or(lt(col1, 5), not(lteq(col1, 1))), or(lt(col1, 5), lteq(col1, 3)))", sarg.toFilterPredicate().toString());
+    }
+
+    private void buildExpression(SearchArgument.Builder builder, List<Object> filterList) {
+        for (Object f : filterList) {
+            if (f instanceof LogicalFilter) {
+                switch(((LogicalFilter) f).getOperator()) {
+                    case HDOP_OR:
+                        builder.startOr();
+                        break;
+                    case HDOP_AND:
+                        builder.startAnd();
+                        break;
+                    case HDOP_NOT:
+                        builder.startNot();
+                        break;
+                }
+                buildExpression(builder, ((LogicalFilter) f).getFilterList());
+                builder.end();
+            } else {
+                buildArgument(builder, f);
+            }
+        }
+    }
+
+    private void buildArgument(SearchArgument.Builder builder, Object filterObj) {
+        /* The below functions will not be compatible and requires update  with Hive 2.0 APIs */
+        BasicFilter filter = (BasicFilter) filterObj;
+        int filterColumnIndex = filter.getColumn().index();
+        Object filterValue = filter.getConstant().constant();
+        Integer[] arr = {};
+        ColumnDescriptor filterColumn = new ColumnDescriptor("col1", 1,1, "Integer", arr);
+        String filterColumnName = filterColumn.columnName();
+
+        switch(filter.getOperation()) {
+            case HDOP_LT:
+                builder.lessThan(filterColumnName, filterValue);
+                break;
+            case HDOP_GT:
+                builder.startNot().lessThanEquals(filterColumnName, filterValue).end();
+                break;
+            case HDOP_LE:
+                builder.lessThanEquals(filterColumnName, filterValue);
+                break;
+            case HDOP_GE:
+                builder.startNot().lessThanEquals(filterColumnName, filterValue).end();
+                break;
+            case HDOP_EQ:
+                builder.equals(filterColumnName, filterValue);
+                break;
+            case HDOP_NE:
+                builder.startNot().equals(filterColumnName, filterValue).end();
+                break;
+        }
+        return;
+    }
+}
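
A note on the mapping in buildArgument() above: only three comparison primitives are used (lessThan, lessThanEquals, equals), so HDOP_GT and HDOP_NE are expressed through negation, NOT (x <= v) for x > v and NOT (x = v) for x != v (by the same logic, x >= v corresponds to NOT (x < v)). The expected string in the test is the conjunctive-normal-form equivalent of the OR-of-ANDs that "a2c1o2a3c3o3l0a4c5o1l1" encodes, with every column reference mapped to the hard-coded "col1", so the output of toFilterPredicate() need not mirror the shape of the original filter tree. The negation idiom in isolation, as a minimal sketch (class name, column name, and constant are made up for illustration):

    import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
    import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;

    public class NegationIdiomSketch {
        public static void main(String[] args) {
            SearchArgument.Builder b = SearchArgumentFactory.newBuilder();
            b.startAnd();
            // col1 > 5 has no direct leaf method here, so it is written as NOT (col1 <= 5);
            // col1 >= 5 would likewise be NOT (col1 < 5)
            b.startNot().lessThanEquals("col1", 5).end();
            b.end();
            SearchArgument sarg = b.build();
            System.out.println(sarg.toFilterPredicate());
        }
    }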



[02/18] incubator-hawq git commit: HAWQ-1048. Added free logic.

Posted by od...@apache.org.
HAWQ-1048. Added free logic.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/7ef7e0fb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/7ef7e0fb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/7ef7e0fb

Branch: refs/heads/HAWQ-964
Commit: 7ef7e0fbfe332d28755175045b1835f5c1117e25
Parents: cd186f6
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Mon Sep 19 14:05:22 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Mon Sep 19 14:05:22 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c | 31 +++------------------------
 1 file changed, 3 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7ef7e0fb/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index 76e83b4..bcd781c 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -33,7 +33,6 @@
 
 static List* pxf_make_expression_items_list(List *quals);
 static void pxf_free_filter(PxfFilterDesc* filter);
-static void pxf_free_filter_list(List *filters);
 static char* pxf_serialize_filter_list(List *filters);
 static bool opexpr_to_pxffilter(OpExpr *expr, PxfFilterDesc *filter);
 static bool supported_filter_type(Oid type);
@@ -226,30 +225,6 @@ pxf_free_filter(PxfFilterDesc* filter)
 }
 
 /*
- * pxf_free_filter_list
- *
- * free all memory associated with the filters once no longer needed.
- * alternatively we could have allocated them in a shorter lifespan
- * memory context, however explicitly freeing them is easier and makes
- * more sense.
- */
-static void
-pxf_free_filter_list(List *filters)
-{
-	ListCell		*lc 	= NULL;
-	PxfFilterDesc 	*filter = NULL;
-
-	if (list_length(filters) == 0)
-		return;
-
-	foreach (lc, filters)
-	{
-		filter	= (PxfFilterDesc *) lfirst(lc);
-		pxf_free_filter(filter);
-	}
-}
-
-/*
  * pxf_serialize_filter_list
  *
  * Given a list of implicitly ANDed PxfFilterDesc objects, produce a
@@ -328,9 +303,10 @@ pxf_serialize_filter_list(List *expressionItems)
 										 "filter_list. Found a non const+attr filter")));
 					}
 					appendStringInfo(resbuf, "%c%d", PXF_OPERATOR_CODE, o);
-				}
-				else
+					pxf_free_filter(filter);
+				} else{
 					pfree(filter);
+				}
 				break;
 			}
 			case T_BoolExpr:
@@ -621,7 +597,6 @@ char *serializePxfFilterQuals(List *quals)
 
 		List *expressionItems = pxf_make_expression_items_list(quals);
 		result  = pxf_serialize_filter_list(expressionItems);
-		//pxf_free_filter_list(expressionItems);
 	}
 	elog(DEBUG2, "serializePxfFilterQuals: filter result: %s", (result == NULL) ? "null" : result);
 


[18/18] incubator-hawq git commit: Merge branch 'HAWQ-1048' into HAWQ-964

Posted by od...@apache.org.
Merge branch 'HAWQ-1048' into HAWQ-964


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/509aef12
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/509aef12
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/509aef12

Branch: refs/heads/HAWQ-964
Commit: 509aef120a33261555234cdfb57f8ca32d9daef3
Parents: 55db7ec dffc760
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Wed Oct 5 11:28:49 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Wed Oct 5 11:28:49 2016 -0700

----------------------------------------------------------------------
 .travis.yml                                     |    4 +-
 LICENSE                                         |   39 +-
 licenses/LICENSE-json.txt                       |  131 --
 licenses/LICENSE-pexpect-4.2.txt                |   20 +
 licenses/LICENSE-plperl.txt                     |  135 --
 licenses/LICENSE-ppport.txt                     |  135 ++
 licenses/LICENSE-ptyprocess-0.5.1.txt           |   16 +
 src/backend/access/external/pxffilters.c        |  295 ++-
 src/backend/access/external/pxfheaders.c        |    9 +-
 .../access/external/test/pxffilters_test.c      |   20 +-
 .../access/external/test/pxfheaders_test.c      |    9 +
 .../utils/mmgr/test/memaccounting_test.c        |    2 +
 src/include/access/pxffilters.h                 |   10 +-
 src/include/catalog/JSON.pm                     | 2223 ------------------
 src/include/catalog/JSON/PP.pm                  | 2191 -----------------
 src/include/catalog/JSON/PP/Boolean.pm          |   26 -
 src/include/catalog/JSON/PP5005.pm              |  146 --
 src/include/catalog/JSON/PP56.pm                |  198 --
 src/include/catalog/JSON/PP58.pm                |   93 -
 .../feature/ManagementTool/test_hawq_register.h |   31 +
 .../test_hawq_register_usage2_case1.cpp         |  155 ++
 .../test_hawq_register_usage2_case2.cpp         |   85 +-
 .../test_hawq_register_usage2_case3.cpp         |   85 +-
 .../ManagementTool/usage2case1/bucket0_tpl.yml  |   31 +
 .../usage2case1/error_encoding_tpl.yml          |   21 +
 .../usage2case1/includedirectory.yml            |   21 +
 .../usage2case1/larger_eof_tpl.yml              |   21 +
 .../wrong_distributed_policy_tpl.yml            |   21 +
 tools/bin/Makefile                              |   12 -
 tools/bin/hawqregister                          |  123 +-
 tools/bin/lib/.gitignore                        |    7 -
 31 files changed, 918 insertions(+), 5397 deletions(-)
----------------------------------------------------------------------



[15/18] incubator-hawq git commit: HAWQ-1048. Updated log level to DEBUG1.

Posted by od...@apache.org.
HAWQ-1048. Updated log level to DEBUG1.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/38cb4b00
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/38cb4b00
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/38cb4b00

Branch: refs/heads/HAWQ-964
Commit: 38cb4b0040122992837c9655d52241b6b9757529
Parents: d563ab5
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Mon Sep 26 12:23:32 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Mon Sep 26 12:23:32 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/38cb4b00/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index af6d2ea..4c28bc1 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -266,7 +266,7 @@ pxf_make_expression_items_list(List *quals, Node *parent)
 				break;
 			}
 			default:
-				elog(DEBUG5, "pxf_make_expression_items_list: unsupported node tag %d", tag);
+				elog(DEBUG1, "pxf_make_expression_items_list: unsupported node tag %d", tag);
 				break;
 		}
 	}
@@ -343,7 +343,7 @@ pxf_serialize_filter_list(List *expressionItems)
 		{
 			case T_OpExpr:
 			{
-				elog(DEBUG5, "pxf_serialize_filter_list: node tag %d (T_OpExpr)", tag);
+				elog(DEBUG1, "pxf_serialize_filter_list: node tag %d (T_OpExpr)", tag);
 				PxfFilterDesc *filter = (PxfFilterDesc *) palloc0(sizeof(PxfFilterDesc));
 				OpExpr *expr = (OpExpr *) node;
 				if (opexpr_to_pxffilter(expr, filter))
@@ -386,7 +386,7 @@ pxf_serialize_filter_list(List *expressionItems)
 			{
 				BoolExpr *expr = (BoolExpr *) node;
 				BoolExprType boolType = expr->boolop;
-				elog(DEBUG5, "pxf_serialize_filter_list: node tag %d (T_BoolExpr), bool node type %d", tag, boolType);
+				elog(DEBUG1, "pxf_serialize_filter_list: node tag %d (T_BoolExpr), bool node type %d", tag, boolType);
 				appendStringInfo(resbuf, "%c%d", PXF_LOGICAL_OPERATOR_CODE, boolType);
 				break;
 			}
@@ -430,12 +430,12 @@ opexpr_to_pxffilter(OpExpr *expr, PxfFilterDesc *filter)
 	/* only binary oprs supported currently */
 	if (!rightop)
 	{
-		elog(DEBUG5, "opexpr_to_pxffilter: unary op! leftop_type: %d, op: %d",
+		elog(DEBUG1, "opexpr_to_pxffilter: unary op! leftop_type: %d, op: %d",
 			 leftop_type, expr->opno);
 		return false;
 	}
 
-	elog(DEBUG5, "opexpr_to_gphdfilter: leftop (expr type: %d, arg type: %d), "
+	elog(DEBUG1, "opexpr_to_gphdfilter: leftop (expr type: %d, arg type: %d), "
 			"rightop_type (expr type: %d, arg type %d), op: %d",
 			leftop_type, nodeTag(leftop),
 			rightop_type, nodeTag(rightop),
@@ -674,7 +674,7 @@ char *serializePxfFilterQuals(List *quals)
 	}
 
 
-	elog(DEBUG2, "serializePxfFilterQuals: filter result: %s", (result == NULL) ? "null" : result);
+	elog(DEBUG1, "serializePxfFilterQuals: filter result: %s", (result == NULL) ? "null" : result);
 
 	return result;
 }


[04/18] incubator-hawq git commit: HAWQ-964. Add New Filter Classes

Posted by od...@apache.org.
HAWQ-964. Add New Filter Classes


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/545f8aa0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/545f8aa0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/545f8aa0

Branch: refs/heads/HAWQ-964
Commit: 545f8aa0ab28f2a1112fb43d232e9db2b2cc5a80
Parents: 726be6c
Author: Kavinder Dhaliwal <ka...@gmail.com>
Authored: Mon Sep 19 14:25:08 2016 -0700
Committer: Kavinder Dhaliwal <ka...@gmail.com>
Committed: Tue Sep 20 09:45:44 2016 -0700

----------------------------------------------------------------------
 .../org/apache/hawq/pxf/api/BasicFilter.java    | 37 ++++++++++++++++++++
 .../org/apache/hawq/pxf/api/LogicalFilter.java  | 30 ++++++++++++++++
 .../plugins/hbase/HBaseFilterBuilderTest.java   | 16 +++++++++
 3 files changed, 83 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/545f8aa0/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/BasicFilter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/BasicFilter.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/BasicFilter.java
new file mode 100644
index 0000000..a35a9dd
--- /dev/null
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/BasicFilter.java
@@ -0,0 +1,37 @@
+package org.apache.hawq.pxf.api;
+
+/**
+ * Basic filter provided for cases where the target storage system does not provide it own filter
+ * For example: Hbase storage provides its own filter but for a Writable based record in a
+ * SequenceFile there is no filter provided and so we need to have a default
+ */
+public class BasicFilter {
+    private FilterParser.Operation oper;
+    private FilterParser.ColumnIndex column;
+    private FilterParser.Constant constant;
+
+    /**
+     * Constructs a BasicFilter.
+     *
+     * @param oper the parse operation to perform
+     * @param column the column index
+     * @param constant the constant object
+     */
+    public BasicFilter(FilterParser.Operation oper, FilterParser.ColumnIndex column, FilterParser.Constant constant) {
+        this.oper = oper;
+        this.column = column;
+        this.constant = constant;
+    }
+
+    public FilterParser.Operation getOperation() {
+        return oper;
+    }
+
+    public FilterParser.ColumnIndex getColumn() {
+        return column;
+    }
+
+    public FilterParser.Constant getConstant() {
+        return constant;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/545f8aa0/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/LogicalFilter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/LogicalFilter.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/LogicalFilter.java
new file mode 100644
index 0000000..d7e570e
--- /dev/null
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/LogicalFilter.java
@@ -0,0 +1,30 @@
+package org.apache.hawq.pxf.api;
+
+
+import java.util.List;
+
+public class LogicalFilter {
+    private FilterParser.LogicalOperation operator;
+    private List<Object> filterList;
+
+    public LogicalFilter(FilterParser.LogicalOperation operator, List<Object> result) {
+        this.operator = operator;
+        this.filterList = result;
+    }
+
+    public FilterParser.LogicalOperation getOperator() {
+        return operator;
+    }
+
+    public void setOperator(FilterParser.LogicalOperation operator) {
+        this.operator = operator;
+    }
+
+    public List<Object> getFilterList() {
+        return filterList;
+    }
+
+    public void setFilterList(List<Object> filterList) {
+        this.filterList = filterList;
+    }
+}
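
These two classes are the node types of the parsed filter tree: LogicalFilter keeps its children as a plain List<Object>, so each child is either another LogicalFilter or a BasicFilter leaf, and consumers dispatch on instanceof, as buildExpression() in the ORC example and the partition-filtering code in HiveAccessor do elsewhere in this series. Using Object rather than a shared interface keeps the two classes independent, at the cost of unchecked casts in every consumer. A minimal utility sketch built only on the accessors above (the class and method names are illustrative, not part of the PXF API):

    import org.apache.hawq.pxf.api.BasicFilter;
    import org.apache.hawq.pxf.api.LogicalFilter;

    import java.util.ArrayList;
    import java.util.List;

    public class FilterLeafCollector {

        // Flatten a parsed filter tree (BasicFilter leaves, LogicalFilter nodes)
        // into the list of comparison leaves it contains.
        public static List<BasicFilter> leaves(Object node) {
            List<BasicFilter> out = new ArrayList<>();
            collect(node, out);
            return out;
        }

        private static void collect(Object node, List<BasicFilter> out) {
            if (node instanceof LogicalFilter) {
                for (Object child : ((LogicalFilter) node).getFilterList()) {
                    collect(child, out);
                }
            } else if (node instanceof BasicFilter) {
                out.add((BasicFilter) node);
            }
        }
    }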

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/545f8aa0/pxf/pxf-hbase/src/test/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilderTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hbase/src/test/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilderTest.java b/pxf/pxf-hbase/src/test/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilderTest.java
new file mode 100644
index 0000000..9bc5474
--- /dev/null
+++ b/pxf/pxf-hbase/src/test/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilderTest.java
@@ -0,0 +1,16 @@
+package org.apache.hawq.pxf.plugins.hbase;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class HBaseFilterBuilderTest {
+
+    @Test
+    public void parseNOTExpressionIgnored() throws Exception {
+        String filter = "a1c2o1a1c2o2l0l2";
+        HBaseFilterBuilder builder = new HBaseFilterBuilder(null);
+        assertNull(builder.getFilterObject(filter));
+    }
+
+}
\ No newline at end of file


[10/18] incubator-hawq git commit: HAWQ-1048. Discard whole filter if at least one operator is not supported.

Posted by od...@apache.org.
 HAWQ-1048. Discard whole filter if at least one operator is not supported.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/694ad038
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/694ad038
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/694ad038

Branch: refs/heads/HAWQ-964
Commit: 694ad0383cce829057636365a87b92056bb52e03
Parents: 0ff7037
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Thu Sep 22 11:43:19 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Thu Sep 22 11:43:19 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/694ad038/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index 8c3864d..847f120 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -301,8 +301,12 @@ pxf_serialize_filter_list(List *expressionItems)
 					}
 					appendStringInfo(resbuf, "%c%d", PXF_OPERATOR_CODE, o);
 					pxf_free_filter(filter);
-				} else{
+				} else {
+					/* if at least one expression item is not supported, whole filter doesn't make sense*/
+					elog(INFO, "Query will not be optimized to use filter push-down.");
 					pfree(filter);
+					pfree(resbuf->data);
+					return NULL;
 				}
 				break;
 			}


[14/18] incubator-hawq git commit: HAWQ-1048. Changed logic to support more than one logical operator in list.

Posted by od...@apache.org.
HAWQ-1048. Changed logic to support more than one logical operator in list.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/d563ab5b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/d563ab5b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/d563ab5b

Branch: refs/heads/HAWQ-964
Commit: d563ab5b184b07b86406659ebe0abec7e4915d81
Parents: 9225016
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Fri Sep 23 18:35:41 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Fri Sep 23 18:35:41 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c | 64 ++++++++++++++++++++++++---
 src/include/access/pxffilters.h          |  8 ++++
 2 files changed, 65 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/d563ab5b/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index e945062..af6d2ea 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -31,7 +31,7 @@
 #include "utils/guc.h"
 #include "utils/lsyscache.h"
 
-static List* pxf_make_expression_items_list(List *quals);
+static List* pxf_make_expression_items_list(List *quals, Node *parent);
 static void pxf_free_filter(PxfFilterDesc* filter);
 static char* pxf_serialize_filter_list(List *filters);
 static bool opexpr_to_pxffilter(OpExpr *expr, PxfFilterDesc *filter);
@@ -180,6 +180,28 @@ Oid pxf_supported_types[] =
 	TIMESTAMPOID
 };
 
+static void
+pxf_free_filter_list(List *expressionItems)
+{
+	ListCell		*lc 	= NULL;
+	ExpressionItem 	*expressionItem = NULL;
+	int previousLength;
+
+	while (list_length(expressionItems) > 0)
+	{
+		expressionItem = (ExpressionItem *) lfirst(list_head(expressionItems));
+		pfree(expressionItem);
+
+		/* to avoid freeing already freed items - delete all occurrences of current expression*/
+		previousLength = expressionItems->length + 1;
+		while (expressionItems != NULL && previousLength > expressionItems->length)
+		{
+			previousLength = expressionItems->length;
+			expressionItems = list_delete_ptr(expressionItems, expressionItem);
+		}
+	}
+}
+
 /*
  * pxf_make_expression_items_list
  *
@@ -189,12 +211,15 @@ Oid pxf_supported_types[] =
  *
  * Basically this function just transforms expression tree to Reversed Polish Notation list.
  *
+ *
  */
 static List *
-pxf_make_expression_items_list(List *quals)
+pxf_make_expression_items_list(List *quals, Node *parent)
 {
+	ExpressionItem *expressionItem = NULL;
 	List			*result = NIL;
 	ListCell		*lc = NULL;
+	ListCell		*ilc = NULL;
 	
 	if (list_length(quals) == 0)
 		return NIL;
@@ -203,20 +228,41 @@ pxf_make_expression_items_list(List *quals)
 	{
 		Node *node = (Node *) lfirst(lc);
 		NodeTag tag = nodeTag(node);
+		expressionItem = (ExpressionItem *) palloc0(sizeof(ExpressionItem));
+		expressionItem->node = node;
+		expressionItem->parent = parent;
+		expressionItem->processed = false;
 
 		switch (tag)
 		{
 			case T_OpExpr:
 			{
-				result = lappend(result, node);
+				result = lappend(result, expressionItem);
 				break;
 			}
 			case T_BoolExpr:
 			{
 				BoolExpr	*expr = (BoolExpr *) node;
-				List *inner_result = pxf_make_expression_items_list(expr->args);
+				List *inner_result = pxf_make_expression_items_list(expr->args, node);
 				result = list_concat(result, inner_result);
-				result = lappend(result, node);
+
+				int childNodesNum = 0;
+
+				/* Find number of child nodes on first level*/
+				foreach (ilc, inner_result)
+				{
+					ExpressionItem *ei = (ExpressionItem *) lfirst(ilc);
+					if (!ei->processed && ei->parent == node)
+					{
+						ei->processed = true;
+						childNodesNum++;
+					}
+				}
+
+				for (int i = 0; i < childNodesNum - 1; i++)
+				{
+					result = lappend(result, expressionItem);
+				}
 				break;
 			}
 			default:
@@ -289,7 +335,8 @@ pxf_serialize_filter_list(List *expressionItems)
 	 */
 	foreach (lc, expressionItems)
 	{
-		Node *node = (Node *) lfirst(lc);
+		ExpressionItem *expressionItem = (ExpressionItem *) lfirst(lc);
+		Node *node = expressionItem->node;
 		NodeTag tag = nodeTag(node);
 
 		switch (tag)
@@ -621,9 +668,12 @@ char *serializePxfFilterQuals(List *quals)
 	if (pxf_enable_filter_pushdown)
 	{
 
-		List *expressionItems = pxf_make_expression_items_list(quals);
+		List *expressionItems = pxf_make_expression_items_list(quals, NULL);
 		result  = pxf_serialize_filter_list(expressionItems);
+		pxf_free_filter_list(expressionItems);
 	}
+
+
 	elog(DEBUG2, "serializePxfFilterQuals: filter result: %s", (result == NULL) ? "null" : result);
 
 	return result;

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/d563ab5b/src/include/access/pxffilters.h
----------------------------------------------------------------------
diff --git a/src/include/access/pxffilters.h b/src/include/access/pxffilters.h
index a1cd661..894337b 100644
--- a/src/include/access/pxffilters.h
+++ b/src/include/access/pxffilters.h
@@ -92,6 +92,14 @@ typedef struct dbop_pxfop_map
 
 } dbop_pxfop_map;
 
+
+typedef struct ExpressionItem
+{
+	Node	*node;
+	Node	*parent;
+	bool	processed;
+} ExpressionItem;
+
 static inline bool pxfoperand_is_attr(PxfOperand x)
 {
 	return (x.opcode == PXF_ATTR_CODE);
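
The essential change above: a Postgres AND/OR BoolExpr can carry more than two arguments, while the serialized stream is consumed as a sequence of binary reductions, so the BoolExpr's expression item is now appended childNodesNum - 1 times after its operands. Read left to right with a stack, N operands followed by N - 1 operator tokens reduce back to the original N-way conjunction. A stack-based illustration of that property (this is not the PXF parser; the token strings are made up):

    import java.util.ArrayDeque;
    import java.util.Deque;

    public class RpnReductionSketch {
        public static void main(String[] args) {
            // Flattened 3-way AND: operands first, then (3 - 1) = 2 operator tokens.
            String[] rpn = {"q1", "q2", "q3", "AND", "AND"};
            Deque<String> stack = new ArrayDeque<>();
            for (String tok : rpn) {
                if ("AND".equals(tok)) {
                    String right = stack.pop();
                    String left = stack.pop();
                    stack.push("(" + left + " AND " + right + ")");
                } else {
                    stack.push(tok);
                }
            }
            // Prints: (q1 AND (q2 AND q3)), equivalent to the original q1 AND q2 AND q3.
            System.out.println(stack.pop());
        }
    }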


[06/18] incubator-hawq git commit: HAWQ-964. Update unittests and remove @Test from example code

Posted by od...@apache.org.
HAWQ-964. Update unittests and remove @Test from example code


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/f3668dcc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/f3668dcc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/f3668dcc

Branch: refs/heads/HAWQ-964
Commit: f3668dcca2d5862247ba4d5e8e17e7bd2778aba8
Parents: 545f8aa
Author: Kavinder Dhaliwal <ka...@gmail.com>
Authored: Mon Sep 19 14:51:10 2016 -0700
Committer: Kavinder Dhaliwal <ka...@gmail.com>
Committed: Tue Sep 20 09:45:44 2016 -0700

----------------------------------------------------------------------
 .../apache/hawq/pxf/api/FilterParserTest.java   | 71 ++++++++++----------
 .../hive/HiveORCSearchArgumentExample.java      |  1 -
 2 files changed, 35 insertions(+), 37 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f3668dcc/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
index 83bb2dc..a129a4b 100644
--- a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
@@ -288,40 +288,65 @@ public class FilterParserTest {
 
     @Test
     public void parseLogicalAndOperator() throws Exception {
-        filter = "l0";
-        Object op = "filter with 1 AND operator";
+        filter = "a1c0o5a2c3o2l0";
+
+        Object firstOp = "first operation HDOP_EQ";
+        Object secondOp = "second operation HDOP_GT";
+        Object lastOp = "filter with 2 operations connected by AND";
+
+        when(filterBuilder.build(eq(Operation.HDOP_EQ),
+                any(),
+                any())).thenReturn(firstOp);
+
+        when(filterBuilder.build(eq(Operation.HDOP_GT),
+                any(),
+                any())).thenReturn(secondOp);
 
         when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
                 any(),
-                any())).thenReturn(op);
+                any())).thenReturn(lastOp);
 
         Object result = filterParser.parse(filter);
 
-        assertEquals(op, result);
+        assertEquals(lastOp, result);
     }
 
     @Test
     public void parseLogicalOrOperator() throws Exception {
-        filter = "l1";
+        filter = "a1c0o5a2c3o2l1";
+
+        Object firstOp = "first operation HDOP_EQ";
+        Object secondOp = "second operation HDOP_GT";
+        Object lastOp = "filter with 1 OR operator";
 
-        Object op = "filter with 1 OR operator";
+        when(filterBuilder.build(eq(Operation.HDOP_EQ),
+                any(),
+                any())).thenReturn(firstOp);
+
+        when(filterBuilder.build(eq(Operation.HDOP_GT),
+                any(),
+                any())).thenReturn(secondOp);
 
         when(filterBuilder.build(eq(LogicalOperation.HDOP_OR),
                 any(),
-                any())).thenReturn(op);
+                any())).thenReturn(lastOp);
 
         Object result = filterParser.parse(filter);
-        assertEquals(op, result);
+        assertEquals(lastOp, result);
     }
 
     @Test
     public void parseLogicalNotOperator() throws Exception {
-        filter = "l2";
+        filter = "a1c0o5l2";
 
+        Object firstOp = "first operation HDOP_EQ";
         Object op = "filter with NOT operator";
 
-        when(filterBuilder.build(eq(LogicalOperation.HDOP_NOT),
+        when(filterBuilder.build(eq(Operation.HDOP_EQ),
                 any(),
+                any())).thenReturn(firstOp);
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_NOT),
                 any())).thenReturn(op);
 
         Object result = filterParser.parse(filter);
@@ -344,31 +369,6 @@ public class FilterParserTest {
     }
 
     @Test
-    public void parseLogicalOperatorWithExpressions() throws Exception {
-        filter = "a1c\"first\"o5a2c2o2l0";
-        Object firstOp = "first operation HDOP_EQ";
-        Object secondOp = "second operation HDOP_GT";
-        Object lastOp = "filter with 2 operations connected by AND";
-
-        when(filterBuilder.build(eq(Operation.HDOP_EQ),
-                any(),
-                any())).thenReturn(firstOp);
-
-
-        when(filterBuilder.build(eq(Operation.HDOP_GT),
-                any(),
-                any())).thenReturn(secondOp);
-
-        when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
-                any(),
-                any())).thenReturn(lastOp);
-
-
-        Object result = filterParser.parse(filter);
-        assertEquals(lastOp, result);
-    }
-
-    @Test
     public void parseLogicalOperatorNotExpression() throws Exception {
         filter = "a1c\"first\"o5a2c2o2l0l2";
         Object firstOp = "first operation HDOP_EQ";
@@ -396,7 +396,6 @@ public class FilterParserTest {
         assertEquals(lastOp, result);
     }
 
-
 	/*
      * Helper functions
 	 */

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f3668dcc/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCSearchArgumentExample.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCSearchArgumentExample.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCSearchArgumentExample.java
index a520b94..d884022 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCSearchArgumentExample.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCSearchArgumentExample.java
@@ -13,7 +13,6 @@ import java.util.List;
 
 public class HiveORCSearchArgumentExample {
 
-    @Test
     public void buildLogicalOperationTree() throws Exception {
 
         /* Predicate pushdown configuration */
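
The FilterParserTest changes above follow from the RPN encoding: a logical-operator token combines expressions that were already emitted before it, so a bare "l0" is no longer a meaningful filter string, and every logical-operator test now stubs the comparison builds as well. Decoded with the operator-code readings exercised by the HiveFilterBuilder tests earlier in this series, the minimal strings used here are:

    a1c0o5a2c3o2l0   ->  (col#1 = 0) AND (col#2 > 3)
    a1c0o5a2c3o2l1   ->  (col#1 = 0) OR  (col#2 > 3)
    a1c0o5l2         ->  NOT (col#1 = 0)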


[03/18] incubator-hawq git commit: HAWQ-1048. Updated comments.

Posted by od...@apache.org.
HAWQ-1048. Updated comments.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/7a9cc88d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/7a9cc88d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/7a9cc88d

Branch: refs/heads/HAWQ-964
Commit: 7a9cc88df6b53bbe6cb9b6e5735f08da1a651a42
Parents: 7ef7e0f
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Mon Sep 19 17:01:25 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Mon Sep 19 17:01:25 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c | 35 ++++++++-------------------
 src/include/access/pxffilters.h          |  7 ------
 2 files changed, 10 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7a9cc88d/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index bcd781c..0d1e1fa 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -159,10 +159,11 @@ Oid pxf_supported_types[] =
  * pxf_make_expression_items_list
  *
  * Given a scan node qual list, find the filters that are eligible to be used
- * by PXF, construct a PxfFilterDesc list that describes the filter information,
+ * by PXF, construct an expressions list, which consists of OpExpr or BoolExpr nodes
  * and return it to the caller.
  *
- * Caller is responsible for pfreeing the returned PxfFilterDesc List.
+ * Basically this function just transforms expression tree to Reversed Polish Notation list.
+ *
  */
 static List *
 pxf_make_expression_items_list(List *quals)
@@ -227,7 +228,7 @@ pxf_free_filter(PxfFilterDesc* filter)
 /*
  * pxf_serialize_filter_list
  *
- * Given a list of implicitly ANDed PxfFilterDesc objects, produce a
+ * Takes expression items list in RPN notation, produce a
  * serialized string representation in order to communicate this list
  * over the wire.
  *
@@ -239,9 +240,7 @@ pxf_free_filter(PxfFilterDesc* filter)
  *
  * Example filter list:
  *
- * Column(0) > 1
- * Column(0) < 5
- * Column(2) == "third"
+ * Column(0) > 1 AND Column(0) < 5 AND Column(2) == "third"
  *
  * Yields the following serialized string:
  *
@@ -261,10 +260,7 @@ pxf_serialize_filter_list(List *expressionItems)
 	initStringInfo(resbuf);
 
 	/*
-	 * Iterate through the filters in the list and serialize them one after
-	 * the other. We use buffer copying because it's clear. Considering the
-	 * typical small number of memcpy's this generates overall, there's no
-	 * point in optimizing, better keep it clear.
+	 * Iterate through the expression items in the list and serialize them one after the other.
 	 */
 	foreach (lc, expressionItems)
 	{
@@ -275,6 +271,7 @@ pxf_serialize_filter_list(List *expressionItems)
 		{
 			case T_OpExpr:
 			{
+				elog(DEBUG5, "pxf_serialize_filter_list: node tag %d (T_OpExpr)", tag);
 				PxfFilterDesc *filter = (PxfFilterDesc *) palloc0(sizeof(PxfFilterDesc));
 				OpExpr *expr = (OpExpr *) node;
 				if (opexpr_to_pxffilter(expr, filter))
@@ -296,7 +293,7 @@ pxf_serialize_filter_list(List *expressionItems)
 					}
 					else
 					{
-						/* pxf_make_filter_list() should have never let this happen */
+						/* opexpr_to_pxffilter() should have never let this happen */
 						ereport(ERROR,
 								(errcode(ERRCODE_INTERNAL_ERROR),
 								 errmsg("internal error in pxffilters.c:pxf_serialize_"
@@ -313,20 +310,8 @@ pxf_serialize_filter_list(List *expressionItems)
 			{
 				BoolExpr *expr = (BoolExpr *) node;
 				BoolExprType boolType = expr->boolop;
-				PxfOperatorCode pxfOperandCode;
-				switch (boolType)
-				{
-					case AND_EXPR:
-						pxfOperandCode = PXFLOP_AND;
-						break;
-					case OR_EXPR:
-						pxfOperandCode = PXFLOP_OR;
-						break;
-					case NOT_EXPR:
-						pxfOperandCode = PXFLOP_NOT;
-						break;
-				}
-				appendStringInfo(resbuf, "%c%d", PXF_LOGICAL_OPERATOR_CODE, pxfOperandCode);
+				elog(DEBUG5, "pxf_serialize_filter_list: node tag %d (T_BoolExpr), bool node type %d", tag, boolType);
+				appendStringInfo(resbuf, "%c%d", PXF_LOGICAL_OPERATOR_CODE, boolType);
 				break;
 			}
 		}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7a9cc88d/src/include/access/pxffilters.h
----------------------------------------------------------------------
diff --git a/src/include/access/pxffilters.h b/src/include/access/pxffilters.h
index 3e80944..a1cd661 100644
--- a/src/include/access/pxffilters.h
+++ b/src/include/access/pxffilters.h
@@ -48,13 +48,6 @@ typedef enum PxfOperatorCode
 
 } PxfOperatorCode;
 
-typedef enum PxfLogicalOperatorCode
-{
-	PXFLOP_AND = 0,
-	PXFLOP_OR,
-	PXFLOP_NOT,
-} PxfLogicalOperatorCode;
-
 /*
  * each supported operand from both sides of the operator is represented
  * by a code that will describe the operator type in the final serialized


[09/18] incubator-hawq git commit: HAWQ-964. Fix issues where only one filter is present

Posted by od...@apache.org.
HAWQ-964. Fix issues where only one filter is present


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/8f73c2b9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/8f73c2b9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/8f73c2b9

Branch: refs/heads/HAWQ-964
Commit: 8f73c2b9295e93be68958b0c6b35a53143e5f7a7
Parents: 93052b9
Author: Kavinder Dhaliwal <ka...@gmail.com>
Authored: Thu Sep 22 11:34:11 2016 -0700
Committer: Kavinder Dhaliwal <ka...@gmail.com>
Committed: Thu Sep 22 11:34:11 2016 -0700

----------------------------------------------------------------------
 .../apache/hawq/pxf/plugins/hive/HiveAccessor.java    |  2 +-
 .../apache/hawq/pxf/plugins/hive/HiveORCAccessor.java | 14 +++++++++++++-
 2 files changed, 14 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8f73c2b9/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
index 2f1c26e..b2b3e4b 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
@@ -276,7 +276,7 @@ public class HiveAccessor extends HdfsSplittableDataAccessor {
                                   Object filter, InputData input) {
         // Let's look first at the filter and escape if there are any OR or NOT ops
         if (!testForUnsupportedOperators(Arrays.asList(filter)))
-            return false;
+            return true;
 
         return testForPartitionEquality(partitionFields, Arrays.asList(filter), input);
     }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8f73c2b9/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
index 195382a..ab2f96e 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
@@ -100,7 +100,19 @@ public class HiveORCAccessor extends HiveAccessor {
         HiveFilterBuilder eval = new HiveFilterBuilder(inputData);
         Object filter = eval.getFilterObject(filterStr);
         SearchArgument.Builder filterBuilder = SearchArgumentFactory.newBuilder();
-        buildExpression(filterBuilder, Arrays.asList(filter));
+
+        /*
+         * If there is only a single filter it will be of type Basic Filter
+         * need special case logic to make sure to still wrap the filter in a
+         * startAnd() & end() block
+         */
+        if (filter instanceof LogicalFilter)
+            buildExpression(filterBuilder, Arrays.asList(filter));
+        else {
+            filterBuilder.startAnd();
+            buildArgument(filterBuilder, filter);
+            filterBuilder.end();
+        }
         SearchArgument sarg = filterBuilder.build();
         jobConf.set(SARG_PUSHDOWN, sarg.toKryo());
     }
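
Two fixes are combined here. In HiveORCAccessor, a filter that parses to a single BasicFilter (no logical operator) is wrapped in startAnd()/end() so the SearchArgument.Builder still receives a complete group; a one-operand AND changes nothing logically. In HiveAccessor, testOneFilter() now returns true when the filter contains OR or NOT, i.e. the partition is kept and pruning simply falls back to scanning, where the earlier return false would have discarded partitions the filter gave no basis to discard. The wrapping idiom on its own, as a minimal sketch (class name, column name, and constant are made up):

    import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
    import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;

    public class SingleLeafWrappingSketch {
        public static void main(String[] args) {
            SearchArgument.Builder b = SearchArgumentFactory.newBuilder();
            // A lone comparison is still wrapped in an (implicitly true) AND group.
            b.startAnd();
            b.equals("col1", 3);
            b.end();
            SearchArgument sarg = b.build();
            System.out.println(sarg.toFilterPredicate());
        }
    }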


[07/18] incubator-hawq git commit: HAWQ-964. Ignore Expressions with OR/NOT from partition filtering

Posted by od...@apache.org.
HAWQ-964. Ignore Expressions with OR/NOT from partition filtering


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/93052b93
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/93052b93
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/93052b93

Branch: refs/heads/HAWQ-964
Commit: 93052b93098de971eaaa0ed2c5da03372e201509
Parents: f3668dc
Author: Kavinder Dhaliwal <ka...@gmail.com>
Authored: Tue Sep 20 15:35:08 2016 -0700
Committer: Kavinder Dhaliwal <ka...@gmail.com>
Committed: Tue Sep 20 15:35:08 2016 -0700

----------------------------------------------------------------------
 .../hawq/pxf/plugins/hive/HiveAccessor.java     | 86 +++++++++++++-------
 1 file changed, 57 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/93052b93/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
index 20a1b9f..2f1c26e 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
@@ -21,6 +21,7 @@ package org.apache.hawq.pxf.plugins.hive;
 
 import org.apache.hawq.pxf.api.BasicFilter;
 import org.apache.hawq.pxf.api.FilterParser;
+import org.apache.hawq.pxf.api.LogicalFilter;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;
 import org.apache.hawq.pxf.plugins.hdfs.HdfsSplittableDataAccessor;
@@ -32,6 +33,7 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
@@ -206,51 +208,77 @@ public class HiveAccessor extends HdfsSplittableDataAccessor {
         return testOneFilter(partitionFields, filter, inputData);
     }
 
-    /*
-     * We are testing one filter against all the partition fields. The filter
-     * has the form "fieldA = valueA". The partitions have the form
-     * partitionOne=valueOne/partitionTwo=ValueTwo/partitionThree=valueThree 1.
-     * For a filter to match one of the partitions, lets say partitionA for
-     * example, we need: fieldA = partittionOne and valueA = valueOne. If this
-     * condition occurs, we return true. 2. If fieldA does not match any one of
-     * the partition fields we also return true, it means we ignore this filter
-     * because it is not on a partition field. 3. If fieldA = partittionOne and
-     * valueA != valueOne, then we return false.
-     */
-    private boolean testOneFilter(List<HivePartition> partitionFields,
-                                  Object filter, InputData input) {
-        // Let's look first at the filter
-        BasicFilter bFilter = (BasicFilter) filter;
+    private boolean testForUnsupportedOperators(List<Object> filterList) {
+        boolean nonAndOp = true;
+        for (Object filter : filterList) {
+            if (filter instanceof LogicalFilter) {
+                if (((LogicalFilter) filter).getOperator() != FilterParser.LogicalOperation.HDOP_AND)
+                    return false;
+                if (((LogicalFilter) filter).getFilterList() != null)
+                    nonAndOp = testForUnsupportedOperators(((LogicalFilter) filter).getFilterList());
+            }
+        }
+        return nonAndOp;
+    }
 
-        boolean isFilterOperationEqual = (bFilter.getOperation() == FilterParser.Operation.HDOP_EQ);
-        if (!isFilterOperationEqual) /*
+    private boolean testForPartitionEquality(List<HivePartition> partitionFields, List<Object> filterList, InputData input) {
+        boolean partitionAllowed = true;
+        for (Object filter : filterList) {
+            if (filter instanceof BasicFilter) {
+                BasicFilter bFilter = (BasicFilter) filter;
+                boolean isFilterOperationEqual = (bFilter.getOperation() == FilterParser.Operation.HDOP_EQ);
+                if (!isFilterOperationEqual) /*
                                       * in case this is not an "equality filter"
                                       * we ignore it here - in partition
                                       * filtering
                                       */{
-            return true;
-        }
+                    return true;
+                }
 
-        int filterColumnIndex = bFilter.getColumn().index();
-        String filterValue = bFilter.getConstant().constant().toString();
-        ColumnDescriptor filterColumn = input.getColumn(filterColumnIndex);
-        String filterColumnName = filterColumn.columnName();
+                int filterColumnIndex = bFilter.getColumn().index();
+                String filterValue = bFilter.getConstant().constant().toString();
+                ColumnDescriptor filterColumn = input.getColumn(filterColumnIndex);
+                String filterColumnName = filterColumn.columnName();
+
+                for (HivePartition partition : partitionFields) {
+                    if (filterColumnName.equals(partition.name)) {
 
-        for (HivePartition partition : partitionFields) {
-            if (filterColumnName.equals(partition.name)) {
                 /*
                  * the filter field matches a partition field, but the values do
                  * not match
                  */
-                return filterValue.equals(partition.val);
-            }
-        }
+                        return filterValue.equals(partition.val);
+                    }
+                }
 
         /*
          * filter field did not match any partition field, so we ignore this
          * filter and hence return true
          */
-        return true;
+            } else if (filter instanceof LogicalFilter) {
+                partitionAllowed = testForPartitionEquality(partitionFields, ((LogicalFilter) filter).getFilterList(), input);
+            }
+        }
+        return partitionAllowed;
+    }
+    /*
+     * We are testing one filter against all the partition fields. The filter
+     * has the form "fieldA = valueA". The partitions have the form
+     * partitionOne=valueOne/partitionTwo=ValueTwo/partitionThree=valueThree 1.
+     * For a filter to match one of the partitions, lets say partitionA for
+     * example, we need: fieldA = partittionOne and valueA = valueOne. If this
+     * condition occurs, we return true. 2. If fieldA does not match any one of
+     * the partition fields we also return true, it means we ignore this filter
+     * because it is not on a partition field. 3. If fieldA = partittionOne and
+     * valueA != valueOne, then we return false.
+     */
+    private boolean testOneFilter(List<HivePartition> partitionFields,
+                                  Object filter, InputData input) {
+        // Let's look first at the filter and escape if there are any OR or NOT ops
+        if (!testForUnsupportedOperators(Arrays.asList(filter)))
+            return false;
+
+        return testForPartitionEquality(partitionFields, Arrays.asList(filter), input);
     }
 
     private void printOneBasicFilter(Object filter) {
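
To summarize the partition-filtering rules introduced above: testForUnsupportedOperators() answers "is this filter built from AND-combined comparisons only?", and any OR or NOT anywhere in the tree disables partition pruning for the fragment; testForPartitionEquality() then keeps a partition unless an equality comparison names a partition column with a different value, while non-equality comparisons and columns that are not partition fields leave the partition in place. The pruning methods are private to HiveAccessor, so the sketch below only shows the kind of filter object each string produces; whether pruning can apply follows from the rules just described (class name, column numbers, and constants are made up):

    import org.apache.hawq.pxf.api.LogicalFilter;
    import org.apache.hawq.pxf.plugins.hive.HiveFilterBuilder;

    public class PruningEligibilitySketch {
        public static void main(String[] args) throws Exception {
            HiveFilterBuilder builder = new HiveFilterBuilder(null);

            // (col#1 = 5) AND (col#2 = 3): equality leaves joined by AND, so
            // pruning can compare each leaf against the partition values.
            Object prunable = builder.getFilterObject("a1c5o5a2c3o5l0");

            // (col#1 = 5) OR (col#2 = 3): the OR makes per-partition elimination
            // unsafe, so pruning is skipped and every partition is scanned.
            Object notPrunable = builder.getFilterObject("a1c5o5a2c3o5l1");

            System.out.println(((LogicalFilter) prunable).getOperator());     // HDOP_AND
            System.out.println(((LogicalFilter) notPrunable).getOperator());  // HDOP_OR
        }
    }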


[08/18] incubator-hawq git commit: HAWQ-1048. Return NULL instead of empty string.

Posted by od...@apache.org.
HAWQ-1048. Return NULL instead of empty string.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/0ff70373
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/0ff70373
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/0ff70373

Branch: refs/heads/HAWQ-964
Commit: 0ff70373b3b700028892c72fd9e1fb516bbcaa25
Parents: 7a9cc88
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Wed Sep 21 18:19:18 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Wed Sep 21 18:19:18 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c | 6 ++++++
 1 file changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0ff70373/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index 0d1e1fa..8c3864d 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -317,6 +317,12 @@ pxf_serialize_filter_list(List *expressionItems)
 		}
 	}
 
+	if (resbuf->len == 0)
+	{
+		pfree(resbuf->data);
+		return NULL;
+	}
+
 	return resbuf->data;
 }
 


[16/18] incubator-hawq git commit: HAWQ-1048. Fixed unit-tests.

Posted by od...@apache.org.
HAWQ-1048. Fixed unit-tests.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/dffc760d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/dffc760d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/dffc760d

Branch: refs/heads/HAWQ-964
Commit: dffc760de7c1ed9ec1a40e5e94d16be202e75b97
Parents: 38cb4b0
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Mon Oct 3 13:17:10 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Mon Oct 3 13:17:10 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c        |  7 +++++++
 .../access/external/test/pxffilters_test.c      | 20 +++++++++++++++++++-
 .../access/external/test/pxfheaders_test.c      |  9 +++++++++
 .../utils/mmgr/test/memaccounting_test.c        |  2 ++
 4 files changed, 37 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dffc760d/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index 4c28bc1..2ada44e 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -321,6 +321,9 @@ pxf_free_filter(PxfFilterDesc* filter)
 static char *
 pxf_serialize_filter_list(List *expressionItems)
 {
+
+	printf("Serializing filter list 1\n");
+
 	StringInfo	 resbuf;
 	ListCell	*lc = NULL;
 
@@ -330,6 +333,8 @@ pxf_serialize_filter_list(List *expressionItems)
 	resbuf = makeStringInfo();
 	initStringInfo(resbuf);
 
+	printf("Serializing filter list 2\n");
+
 	/*
 	 * Iterate through the expression items in the list and serialize them one after the other.
 	 */
@@ -393,6 +398,8 @@ pxf_serialize_filter_list(List *expressionItems)
 		}
 	}
 
+	printf("Serializing filter list 30\n");
+
 	if (resbuf->len == 0)
 	{
 		pfree(resbuf->data);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dffc760d/src/backend/access/external/test/pxffilters_test.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/test/pxffilters_test.c b/src/backend/access/external/test/pxffilters_test.c
index 65b1aef..c1c54cd 100644
--- a/src/backend/access/external/test/pxffilters_test.c
+++ b/src/backend/access/external/test/pxffilters_test.c
@@ -25,6 +25,9 @@
 #include "c.h"
 #include "../pxffilters.c"
 
+void run__const_to_str(Const* input, StringInfo result, char* expected);
+void run__const_to_str__negative(Const* input, StringInfo result, char* value);
+
 void
 test__supported_filter_type(void **state)
 {
@@ -62,7 +65,7 @@ test__supported_filter_type(void **state)
 
 	/* go over pxf_supported_types array */
 	int nargs = sizeof(pxf_supported_types) / sizeof(Oid);
-	assert_int_equal(nargs, 13);
+	assert_int_equal(nargs, 14);
 	for (i = 0; i < nargs; ++i)
 	{
 		assert_true(supported_filter_type(pxf_supported_types[i]));
@@ -475,14 +478,29 @@ test__opexpr_to_pxffilter__unsupportedOpNot(void **state)
 void
 test__pxf_serialize_filter_list__oneFilter(void **state)
 {
+
+	printf("Entered one filter test 1\n");
+
+	 int c = 1, d = 1, f = 1;
+
+	   for ( c = 1 ; c <= 32767 ; c++ )
+	       for ( d = 1 ; d <= 32767 ; d++ )
+	    	   for ( f = 1 ; f <= 10 ; f++ )
+	       {}
+
 	List* filter_list = NIL;
 
+	printf("Entered one filter test 2\n");
+
 	PxfFilterDesc* filter = build_filter(
 			PXF_ATTR_CODE, 1, NULL,
 			PXF_CONST_CODE, 0, "1984",
 			PXFOP_GT);
+
+	printf("Entered one filter test 3\n");
 	filter_list = lappend(filter_list, filter);
 
+
 	char* result = pxf_serialize_filter_list(filter_list);
 	assert_string_equal(result, "a0c1984o2");
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dffc760d/src/backend/access/external/test/pxfheaders_test.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/test/pxfheaders_test.c b/src/backend/access/external/test/pxfheaders_test.c
index 7c79b5a..454ecdc 100644
--- a/src/backend/access/external/test/pxfheaders_test.c
+++ b/src/backend/access/external/test/pxfheaders_test.c
@@ -32,6 +32,15 @@ static extvar_t *mock_extvar = NULL;
 static char *old_pxf_remote_service_login = NULL;
 static char *old_pxf_remote_service_secret = NULL;
 
+void expect_churl_headers(const char *key, const char *value);
+void expect_churl_headers_alignment();
+void store_gucs();
+void setup_gphd_uri();
+void setup_input_data();
+void setup_external_vars();
+void expect_external_vars();
+void restore_gucs();
+
 void
 test__build_http_header__remote_login_is_null(void **state)
 {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dffc760d/src/backend/utils/mmgr/test/memaccounting_test.c
----------------------------------------------------------------------
diff --git a/src/backend/utils/mmgr/test/memaccounting_test.c b/src/backend/utils/mmgr/test/memaccounting_test.c
index a946040..841d171 100644
--- a/src/backend/utils/mmgr/test/memaccounting_test.c
+++ b/src/backend/utils/mmgr/test/memaccounting_test.c
@@ -40,6 +40,8 @@
 #define AllocPointerGetChunk(ptr)	\
 					((StandardChunkHeader *)(((char *)(ptr)) - ALLOC_CHUNKHDRSZ))
 
+void write_stderr_mock(const char *fmt,...);
+
 static StringInfoData outputBuffer;
 
 /* We will capture write_stderr output using write_stderr_mock */


[11/18] incubator-hawq git commit: HAWQ-1048. Added filter-push down support for float8.

Posted by od...@apache.org.
 HAWQ-1048. Added filter-push down support for float8.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/98bc759f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/98bc759f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/98bc759f

Branch: refs/heads/HAWQ-964
Commit: 98bc759fe34363121049fd2f08ed6d72679082b9
Parents: 694ad03
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Thu Sep 22 13:34:20 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Thu Sep 22 13:34:20 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/98bc759f/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index 847f120..20956bf 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -134,8 +134,15 @@ dbop_pxfop_map pxf_supported_opr[] =
 	{1097 /* date_gt */, PXFOP_GT},
 	{1096 /* date_le */, PXFOP_LE},
 	{1098 /* date_ge */, PXFOP_GE},
-	{1094 /* date_ne */, PXFOP_NE}
-
+	{1094 /* date_ne */, PXFOP_NE},
+
+	/* float8 */
+	{Float8EqualOperator  /* float8eq */, PXFOP_EQ},
+	{672  /* float8lt */, PXFOP_LT},
+	{674 /* float8gt */, PXFOP_GT},
+	{673 /* float8le */, PXFOP_LE},
+	{675 /* float8ge */, PXFOP_GE},
+	{671 /* float8ne */, PXFOP_NE}
 };
 
 Oid pxf_supported_types[] =


[12/18] incubator-hawq git commit: HAWQ-1048. Added filter-push down support for bpchar, bytea.

Posted by od...@apache.org.
 HAWQ-1048. Added filter-push down support for bpchar, bytea.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/d1873951
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/d1873951
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/d1873951

Branch: refs/heads/HAWQ-964
Commit: d1873951dbca3928fef948044a948d5477ae7fe9
Parents: 98bc759
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Thu Sep 22 16:45:57 2016 -0700
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Thu Sep 22 16:45:57 2016 -0700

----------------------------------------------------------------------
 src/backend/access/external/pxffilters.c | 32 +++++++++++++++++++++++----
 1 file changed, 28 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/d1873951/src/backend/access/external/pxffilters.c
----------------------------------------------------------------------
diff --git a/src/backend/access/external/pxffilters.c b/src/backend/access/external/pxffilters.c
index 20956bf..17e184a 100644
--- a/src/backend/access/external/pxffilters.c
+++ b/src/backend/access/external/pxffilters.c
@@ -142,7 +142,24 @@ dbop_pxfop_map pxf_supported_opr[] =
 	{674 /* float8gt */, PXFOP_GT},
 	{673 /* float8le */, PXFOP_LE},
 	{675 /* float8ge */, PXFOP_GE},
-	{671 /* float8ne */, PXFOP_NE}
+	{671 /* float8ne */, PXFOP_NE},
+
+	/* bpchar */
+	{BPCharEqualOperator  /* bpchareq */, PXFOP_EQ},
+	{1058  /* bpcharlt */, PXFOP_LT},
+	{1060 /* bpchargt */, PXFOP_GT},
+	{1059 /* bpcharle */, PXFOP_LE},
+	{1061 /* bpcharge */, PXFOP_GE},
+	{1057 /* bpcharne */, PXFOP_NE},
+
+	/* bytea */
+	{ByteaEqualOperator  /* byteaeq */, PXFOP_EQ},
+	{1957  /* bytealt */, PXFOP_LT},
+	{1959 /* byteagt */, PXFOP_GT},
+	{1958 /* byteale */, PXFOP_LE},
+	{1960 /* byteage */, PXFOP_GE},
+	{1956 /* byteane */, PXFOP_NE}
+
 };
 
 Oid pxf_supported_types[] =
@@ -159,7 +176,8 @@ Oid pxf_supported_types[] =
 	CHAROID,
 	BYTEAOID,
 	BOOLOID,
-	DATEOID
+	DATEOID,
+	TIMESTAMPOID
 };
 
 /*
@@ -310,7 +328,7 @@ pxf_serialize_filter_list(List *expressionItems)
 					pxf_free_filter(filter);
 				} else {
 					/* if at least one expression item is not supported, whole filter doesn't make sense*/
-					elog(INFO, "Query will not be optimized to use filter push-down.");
+					elog(DEBUG1, "Query will not be optimized to use filter push-down.");
 					pfree(filter);
 					pfree(resbuf->data);
 					return NULL;
@@ -411,6 +429,7 @@ opexpr_to_pxffilter(OpExpr *expr, PxfFilterDesc *filter)
 	}
 	else
 	{
+		elog(DEBUG1, "opexpr_to_pxffilter: expression is not a Var+Const");
 		return false;
 	}
 
@@ -426,9 +445,10 @@ opexpr_to_pxffilter(OpExpr *expr, PxfFilterDesc *filter)
 		}
 	}
 
+	elog(DEBUG1, "opexpr_to_pxffilter: operator is not supported, operator code: %d", expr->opno);
+
 	/* NOTE: if more validation needed, add it before the operators test
 	 * or alternatively change it to use a false flag and return true below */
-
 	return false;
 }
 
@@ -510,6 +530,9 @@ supported_filter_type(Oid type)
 		if (type == pxf_supported_types[i])
 			return true;
 	}
+
+	elog(DEBUG1, "supported_filter_type: filter pushdown is not supported for datatype oid: %d", type);
+
 	return false;
 }
 
@@ -555,6 +578,7 @@ const_to_str(Const *constval, StringInfo buf)
 		case CHAROID:
 		case BYTEAOID:
 		case DATEOID:
+		case TIMESTAMPOID:
 			appendStringInfo(buf, "\\\"%s\\\"", extval);
 			break;