You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hawq.apache.org by rl...@apache.org on 2017/09/23 09:32:37 UTC
[3/4] incubator-hawq git commit: HAWQ-1198. Fix to filter out
irrelevant fragments while accessing Hive.
HAWQ-1198. Fix to filter out irrelevant fragments while accessing Hive.
Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/dcdeb137
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/dcdeb137
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/dcdeb137
Branch: refs/heads/hdb-2.3.0.1
Commit: dcdeb1379dc4ebb2fcbef0b6016c5a5c22a5c824
Parents: fd9073c
Author: Shubham Sharma <ss...@pivotal.io>
Authored: Thu Sep 14 16:24:27 2017 -0700
Committer: wcl14 <wa...@126.com>
Committed: Fri Sep 15 17:36:26 2017 +0800
----------------------------------------------------------------------
.../pxf/plugins/hive/HiveDataFragmenter.java | 16 ++-
.../plugins/hive/HiveDataFragmenterTest.java | 111 +++++++++++++++++++
2 files changed, 118 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dcdeb137/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
index 6e193c2..c24b552 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
@@ -402,17 +402,11 @@ public class HiveDataFragmenter extends Fragmenter {
// Let's look first at the filter
BasicFilter bFilter = (BasicFilter) filter;
- // In case this is not an "equality filter", we ignore this filter (no
- // add to filter list)
- if (!(bFilter.getOperation() == FilterParser.Operation.HDOP_EQ)) {
- LOG.debug("Filter operator is not EQ, ignore this filter for hive : "
- + filter);
- return false;
- }
-
// Extract column name and value
int filterColumnIndex = bFilter.getColumn().index();
- String filterValue = bFilter.getConstant().constant().toString();
+ // Avoids NullPointerException in case of operations like HDOP_IS_NULL,
+ // HDOP_IS_NOT_NULL where no constant value is passed as part of the query
+ String filterValue = bFilter.getConstant()!= null ? bFilter.getConstant().constant().toString() : "";
ColumnDescriptor filterColumn = inputData.getColumn(filterColumnIndex);
String filterColumnName = filterColumn.columnName();
@@ -453,6 +447,10 @@ public class HiveDataFragmenter extends Fragmenter {
case HDOP_NE:
filtersString.append(HIVE_API_NE);
break;
+ default:
+ // Set filter string to blank in case of unimplemented operations
+ filtersString.setLength(0);
+ return false;
}
filtersString.append(HIVE_API_DQUOTE);
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dcdeb137/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenterTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenterTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenterTest.java
index f93f7ba..2c28500 100755
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenterTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenterTest.java
@@ -20,7 +20,11 @@ package org.apache.hawq.pxf.plugins.hive;
*/
+import org.apache.hawq.pxf.api.FilterParser;
import org.apache.hawq.pxf.api.utilities.InputData;
+import org.apache.hawq.pxf.api.BasicFilter;
+import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
+import static org.apache.hawq.pxf.api.FilterParser.Operation.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -39,6 +43,10 @@ import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.*;
+
@RunWith(PowerMockRunner.class)
@PrepareForTest({HiveDataFragmenter.class}) // Enables mocking 'new' calls
@SuppressStaticInitializationFor({"org.apache.hadoop.mapred.JobConf",
@@ -88,6 +96,109 @@ public class HiveDataFragmenterTest {
}
}
+ @Test
+ public void testBuildSingleFilter() throws Exception {
+ prepareConstruction();
+ fragmenter = new HiveDataFragmenter(inputData);
+ ColumnDescriptor columnDescriptor =
+ new ColumnDescriptor("textColumn", 25, 3, "text", null,true);
+ String filterColumnName=columnDescriptor.columnName();
+ int filterColumnIndex = columnDescriptor.columnIndex();
+ HiveFilterBuilder builder = new HiveFilterBuilder(null);
+ when(inputData.getColumn(filterColumnIndex)).thenReturn(columnDescriptor);
+
+ // Mock private field partitionkeyTypes
+ Field partitionkeyTypes = PowerMockito.field(HiveDataFragmenter.class, "partitionkeyTypes");
+ Map<String, String> localpartitionkeyTypes = new HashMap<>();
+ localpartitionkeyTypes.put(filterColumnName,"string");
+ partitionkeyTypes.set(fragmenter,localpartitionkeyTypes);
+
+ //Mock private field setPartitions
+ Field setPartitions = PowerMockito.field(HiveDataFragmenter.class, "setPartitions");
+ Set<String> localSetPartitions = new TreeSet<String>(
+ String.CASE_INSENSITIVE_ORDER);
+ localSetPartitions.add(filterColumnName);
+ setPartitions.set(fragmenter,localSetPartitions);
+
+ Map<FilterParser.Operation, String> filterStrings = new HashMap<>();
+ /*
+ * Filter string representation in the respective order of their declaration
textColumn != 2016-01-03
textColumn = 2016-01-03
textColumn >= 2016-01-03
textColumn <= 2016-01-03
textColumn > 2016-01-03
textColumn < 2016-01-03
textColumn like '2016-01-0%'
*/
+ filterStrings.put(HDOP_NE, "a3c25s10d2016-01-03o6");
+ filterStrings.put(HDOP_EQ, "a3c25s10d2016-01-03o5");
+ filterStrings.put(HDOP_GE, "a3c25s10d2016-01-03o4");
+ filterStrings.put(HDOP_LE, "a3c25s10d2016-01-03o3");
+ filterStrings.put(HDOP_GT, "a3c25s10d2016-01-03o2");
+ filterStrings.put(HDOP_LT, "a3c25s10d2016-01-03o1");
+ filterStrings.put(HDOP_LIKE, "a3c25s10d2016-01-0%o7");
+
+ for (FilterParser.Operation operation : filterStrings.keySet()){
+ BasicFilter bFilter = (BasicFilter) builder.getFilterObject(filterStrings.get(operation));
+ checkFilters(fragmenter,bFilter, operation);
+ }
+ }
+
+ private void checkFilters(HiveDataFragmenter fragmenter, BasicFilter bFilter, FilterParser.Operation operation)
+ throws Exception{
+
+ String prefix="";
+ StringBuilder localFilterString = new StringBuilder();
+ String expectedResult;
+
+ // Mock private method buildSingleFilter
+ Method method = PowerMockito.method(HiveDataFragmenter.class, "buildSingleFilter",
+ new Class[]{Object.class,StringBuilder.class,String.class});
+ boolean result = (Boolean)method.invoke(fragmenter, new Object[]{bFilter,localFilterString,prefix});
+
+ switch (operation){
+ case HDOP_NE:
+ expectedResult = "textColumn != \"2016-01-03\"";
+ assertTrue(result);
+ assertEquals(expectedResult,localFilterString.toString());
+ break;
+ case HDOP_EQ:
+ expectedResult = "textColumn = \"2016-01-03\"";
+ assertTrue(result);
+ assertEquals(expectedResult,localFilterString.toString());
+ break;
+ case HDOP_GE:
+ expectedResult = "textColumn >= \"2016-01-03\"";
+ assertTrue(result);
+ assertEquals(expectedResult,localFilterString.toString());
+ break;
+ case HDOP_LE:
+ expectedResult = "textColumn <= \"2016-01-03\"";
+ assertTrue(result);
+ assertEquals(expectedResult,localFilterString.toString());
+ break;
+ case HDOP_GT:
+ expectedResult = "textColumn > \"2016-01-03\"";
+ assertTrue(result);
+ assertEquals(expectedResult,localFilterString.toString());
+ break;
+ case HDOP_LT:
+ expectedResult = "textColumn < \"2016-01-03\"";
+ assertTrue(result);
+ assertEquals(expectedResult,localFilterString.toString());
+ break;
+ case HDOP_LIKE:
+ expectedResult = "";
+ assertFalse(result);
+ assertEquals(expectedResult,localFilterString.toString());
+ break;
+ default:
+ assertFalse(result);
+ break;
+ }
+ }
+
private void prepareConstruction() throws Exception {
inputData = mock(InputData.class);