Posted to commits@hive.apache.org by kh...@apache.org on 2014/08/20 00:41:13 UTC
svn commit: r1619005 [7/9] - in /hive/trunk: ./ accumulo-handler/
accumulo-handler/src/ accumulo-handler/src/java/
accumulo-handler/src/java/org/ accumulo-handler/src/java/org/apache/
accumulo-handler/src/java/org/apache/hadoop/ accumulo-handler/src/ja...
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,467 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.accumulo.predicate;
+
+import static org.junit.Assert.assertNotNull;
+
+import java.sql.Date;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.data.Range;
+import org.apache.hadoop.hive.accumulo.AccumuloHiveConstants;
+import org.apache.hadoop.hive.accumulo.columns.ColumnEncoding;
+import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;
+import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
+import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
+import org.apache.hadoop.hive.ql.lib.Dispatcher;
+import org.apache.hadoop.hive.ql.lib.GraphWalker;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.lib.NodeProcessor;
+import org.apache.hadoop.hive.ql.lib.Rule;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.udf.UDFLike;
+import org.apache.hadoop.hive.ql.udf.UDFToString;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+/**
+ *
+ */
+public class TestAccumuloRangeGenerator {
+
+ private AccumuloPredicateHandler handler;
+ private HiveAccumuloRowIdColumnMapping rowIdMapping;
+
+ @Before
+ public void setup() {
+ handler = AccumuloPredicateHandler.getInstance();
+ rowIdMapping = new HiveAccumuloRowIdColumnMapping(AccumuloHiveConstants.ROWID,
+ ColumnEncoding.STRING, "row", TypeInfoFactory.stringTypeInfo.toString());
+ }
+
+ @Test
+ public void testRangeConjunction() throws Exception {
+ // rowId >= 'f'
+ ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
+ ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
+ List<ExprNodeDesc> children = Lists.newArrayList();
+ children.add(column);
+ children.add(constant);
+ ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrGreaterThan(), children);
+ assertNotNull(node);
+
+ // rowId <= 'm'
+ ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null,
+ false);
+ ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "m");
+ List<ExprNodeDesc> children2 = Lists.newArrayList();
+ children2.add(column2);
+ children2.add(constant2);
+ ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrLessThan(), children2);
+ assertNotNull(node2);
+
+ // And UDF
+ List<ExprNodeDesc> bothFilters = Lists.newArrayList();
+ bothFilters.add(node);
+ bothFilters.add(node2);
+ ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPAnd(), bothFilters);
+
+ // Should generate [f,m]
+ List<Range> expectedRanges = Arrays
+ .asList(new Range(new Key("f"), true, new Key("m\0"), false));
+
+ AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(handler, rowIdMapping, "rid");
+ Dispatcher disp = new DefaultRuleDispatcher(rangeGenerator,
+ Collections.<Rule,NodeProcessor> emptyMap(), null);
+ GraphWalker ogw = new DefaultGraphWalker(disp);
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.add(both);
+ HashMap<Node,Object> nodeOutput = new HashMap<Node,Object>();
+
+ try {
+ ogw.startWalking(topNodes, nodeOutput);
+ } catch (SemanticException ex) {
+ throw new RuntimeException(ex);
+ }
+
+ Object result = nodeOutput.get(both);
+ Assert.assertNotNull(result);
+ Assert.assertTrue("Result from graph walk was not a List", result instanceof List);
+ @SuppressWarnings("unchecked")
+ List<Range> actualRanges = (List<Range>) result;
+ Assert.assertEquals(expectedRanges, actualRanges);
+ }
+
+ @Test
+ public void testRangeDisjunction() throws Exception {
+ // rowId >= 'f'
+ ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
+ ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
+ List<ExprNodeDesc> children = Lists.newArrayList();
+ children.add(column);
+ children.add(constant);
+ ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrGreaterThan(), children);
+ assertNotNull(node);
+
+ // rowId <= 'm'
+ ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null,
+ false);
+ ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "m");
+ List<ExprNodeDesc> children2 = Lists.newArrayList();
+ children2.add(column2);
+ children2.add(constant2);
+ ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrLessThan(), children2);
+ assertNotNull(node2);
+
+ // Or UDF
+ List<ExprNodeDesc> bothFilters = Lists.newArrayList();
+ bothFilters.add(node);
+ bothFilters.add(node2);
+ ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPOr(), bothFilters);
+
+ // Should generate (-inf,+inf)
+ List<Range> expectedRanges = Arrays.asList(new Range());
+
+ AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(handler, rowIdMapping, "rid");
+ Dispatcher disp = new DefaultRuleDispatcher(rangeGenerator,
+ Collections.<Rule,NodeProcessor> emptyMap(), null);
+ GraphWalker ogw = new DefaultGraphWalker(disp);
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.add(both);
+ HashMap<Node,Object> nodeOutput = new HashMap<Node,Object>();
+
+ try {
+ ogw.startWalking(topNodes, nodeOutput);
+ } catch (SemanticException ex) {
+ throw new RuntimeException(ex);
+ }
+
+ Object result = nodeOutput.get(both);
+ Assert.assertNotNull(result);
+ Assert.assertTrue("Result from graph walk was not a List", result instanceof List);
+ @SuppressWarnings("unchecked")
+ List<Range> actualRanges = (List<Range>) result;
+ Assert.assertEquals(expectedRanges, actualRanges);
+ }
+
+ @Test
+ public void testRangeConjunctionWithDisjunction() throws Exception {
+ // rowId >= 'h'
+ ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
+ ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "h");
+ List<ExprNodeDesc> children = Lists.newArrayList();
+ children.add(column);
+ children.add(constant);
+ ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrGreaterThan(), children);
+ assertNotNull(node);
+
+ // rowId <= 'd'
+ ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null,
+ false);
+ ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "d");
+ List<ExprNodeDesc> children2 = Lists.newArrayList();
+ children2.add(column2);
+ children2.add(constant2);
+ ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrLessThan(), children2);
+ assertNotNull(node2);
+
+ // rowId >= 'q'
+ ExprNodeDesc column3 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null,
+ false);
+ ExprNodeDesc constant3 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "q");
+ List<ExprNodeDesc> children3 = Lists.newArrayList();
+ children3.add(column3);
+ children3.add(constant3);
+ ExprNodeDesc node3 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrGreaterThan(), children3);
+ assertNotNull(node3);
+
+ // Or UDF, (rowId <= 'd' or rowId >= 'q')
+ List<ExprNodeDesc> orFilters = Lists.newArrayList();
+ orFilters.add(node2);
+ orFilters.add(node3);
+ ExprNodeGenericFuncDesc orNode = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPOr(), orFilters);
+
+ // And UDF, (rowId >= 'h' and (rowId <= 'd' or rowId >= 'q'))
+ List<ExprNodeDesc> andFilters = Lists.newArrayList();
+ andFilters.add(node);
+ andFilters.add(orNode);
+ ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPAnd(), andFilters);
+
+ // Should generate ['q', +inf)
+ List<Range> expectedRanges = Arrays.asList(new Range(new Key("q"), true, null, false));
+
+ AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(handler, rowIdMapping, "rid");
+ Dispatcher disp = new DefaultRuleDispatcher(rangeGenerator,
+ Collections.<Rule,NodeProcessor> emptyMap(), null);
+ GraphWalker ogw = new DefaultGraphWalker(disp);
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.add(both);
+ HashMap<Node,Object> nodeOutput = new HashMap<Node,Object>();
+
+ try {
+ ogw.startWalking(topNodes, nodeOutput);
+ } catch (SemanticException ex) {
+ throw new RuntimeException(ex);
+ }
+
+ Object result = nodeOutput.get(both);
+ Assert.assertNotNull(result);
+ Assert.assertTrue("Result from graph walk was not a List", result instanceof List);
+ @SuppressWarnings("unchecked")
+ List<Range> actualRanges = (List<Range>) result;
+ Assert.assertEquals(expectedRanges, actualRanges);
+ }
+
+ @Test
+ public void testPartialRangeConjunction() throws Exception {
+ // rowId >= 'f'
+ ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
+ ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
+ List<ExprNodeDesc> children = Lists.newArrayList();
+ children.add(column);
+ children.add(constant);
+ ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrGreaterThan(), children);
+ assertNotNull(node);
+
+ // anythingElse <= 'foo'
+ ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "anythingElse",
+ null, false);
+ ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "foo");
+ List<ExprNodeDesc> children2 = Lists.newArrayList();
+ children2.add(column2);
+ children2.add(constant2);
+ ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrLessThan(), children2);
+ assertNotNull(node2);
+
+ // And UDF
+ List<ExprNodeDesc> bothFilters = Lists.newArrayList();
+ bothFilters.add(node);
+ bothFilters.add(node2);
+ ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPAnd(), bothFilters);
+
+ // Should generate [f,+inf)
+ List<Range> expectedRanges = Arrays.asList(new Range(new Key("f"), true, null, false));
+
+ AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(handler, rowIdMapping, "rid");
+ Dispatcher disp = new DefaultRuleDispatcher(rangeGenerator,
+ Collections.<Rule,NodeProcessor> emptyMap(), null);
+ GraphWalker ogw = new DefaultGraphWalker(disp);
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.add(both);
+ HashMap<Node,Object> nodeOutput = new HashMap<Node,Object>();
+
+ try {
+ ogw.startWalking(topNodes, nodeOutput);
+ } catch (SemanticException ex) {
+ throw new RuntimeException(ex);
+ }
+
+ Object result = nodeOutput.get(both);
+ Assert.assertNotNull(result);
+ Assert.assertTrue("Result from graph walk was not a List", result instanceof List);
+ @SuppressWarnings("unchecked")
+ List<Range> actualRanges = (List<Range>) result;
+ Assert.assertEquals(expectedRanges, actualRanges);
+ }
+
+ @Test
+ public void testDateRangeConjunction() throws Exception {
+ // rowId >= '2014-01-01'
+ ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
+ ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo,
+ Date.valueOf("2014-01-01"));
+ List<ExprNodeDesc> children = Lists.newArrayList();
+ children.add(column);
+ children.add(constant);
+ ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrGreaterThan(), children);
+ assertNotNull(node);
+
+ // rowId < '2014-07-01'
+ ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null,
+ false);
+ ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo,
+ Date.valueOf("2014-07-01"));
+ List<ExprNodeDesc> children2 = Lists.newArrayList();
+ children2.add(column2);
+ children2.add(constant2);
+ ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPLessThan(), children2);
+ assertNotNull(node2);
+
+ // And UDF
+ List<ExprNodeDesc> bothFilters = Lists.newArrayList();
+ bothFilters.add(node);
+ bothFilters.add(node2);
+ ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPAnd(), bothFilters);
+
+ // Should generate [2014-01-01, 2014-07-01)
+ List<Range> expectedRanges = Arrays.asList(new Range(new Key("2014-01-01"), true, new Key(
+ "2014-07-01"), false));
+
+ AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(handler, rowIdMapping, "rid");
+ Dispatcher disp = new DefaultRuleDispatcher(rangeGenerator,
+ Collections.<Rule,NodeProcessor> emptyMap(), null);
+ GraphWalker ogw = new DefaultGraphWalker(disp);
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.add(both);
+ HashMap<Node,Object> nodeOutput = new HashMap<Node,Object>();
+
+ try {
+ ogw.startWalking(topNodes, nodeOutput);
+ } catch (SemanticException ex) {
+ throw new RuntimeException(ex);
+ }
+
+ Object result = nodeOutput.get(both);
+ Assert.assertNotNull(result);
+ Assert.assertTrue("Result from graph walk was not a List", result instanceof List);
+ @SuppressWarnings("unchecked")
+ List<Range> actualRanges = (List<Range>) result;
+ Assert.assertEquals(expectedRanges, actualRanges);
+ }
+
+ @Test
+ public void testCastExpression() throws Exception {
+ // 40 and 50
+ ExprNodeDesc fourty = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo,
+ 40), fifty = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 50);
+
+ // +
+ GenericUDFOPPlus plus = new GenericUDFOPPlus();
+
+ // 40 + 50
+ ExprNodeGenericFuncDesc addition = new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, plus, Arrays.asList(fourty, fifty));
+
+ // cast(.... as string)
+ UDFToString stringCast = new UDFToString();
+ GenericUDFBridge stringCastBridge = new GenericUDFBridge("cast", false, stringCast.getClass().getName());
+
+ // cast (40 + 50 as string)
+ ExprNodeGenericFuncDesc cast = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ stringCastBridge, "cast", Collections.<ExprNodeDesc> singletonList(addition));
+
+ ExprNodeDesc key = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "key", null,
+ false);
+
+ ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrGreaterThan(), Arrays.asList(key, cast));
+
+ AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(handler, rowIdMapping, "key");
+ Dispatcher disp = new DefaultRuleDispatcher(rangeGenerator,
+ Collections.<Rule,NodeProcessor> emptyMap(), null);
+ GraphWalker ogw = new DefaultGraphWalker(disp);
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.add(node);
+ HashMap<Node,Object> nodeOutput = new HashMap<Node,Object>();
+
+ try {
+ ogw.startWalking(topNodes, nodeOutput);
+ } catch (SemanticException ex) {
+ throw new RuntimeException(ex);
+ }
+
+ // Don't fail -- would be better to actually compute a range of [90,+inf)
+ Object result = nodeOutput.get(node);
+ Assert.assertNull(result);
+ }
+
+ @Test
+ public void testRangeOverNonRowIdField() throws Exception {
+ // foo >= 'f'
+ ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "foo", null, false);
+ ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
+ List<ExprNodeDesc> children = Lists.newArrayList();
+ children.add(column);
+ children.add(constant);
+ ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrGreaterThan(), children);
+ assertNotNull(node);
+
+ // foo <= 'm'
+ ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "foo", null,
+ false);
+ ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "m");
+ List<ExprNodeDesc> children2 = Lists.newArrayList();
+ children2.add(column2);
+ children2.add(constant2);
+ ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPEqualOrLessThan(), children2);
+ assertNotNull(node2);
+
+ // And UDF
+ List<ExprNodeDesc> bothFilters = Lists.newArrayList();
+ bothFilters.add(node);
+ bothFilters.add(node2);
+ ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
+ new GenericUDFOPAnd(), bothFilters);
+
+ AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(handler, rowIdMapping, "rid");
+ Dispatcher disp = new DefaultRuleDispatcher(rangeGenerator,
+ Collections.<Rule,NodeProcessor> emptyMap(), null);
+ GraphWalker ogw = new DefaultGraphWalker(disp);
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.add(both);
+ HashMap<Node,Object> nodeOutput = new HashMap<Node,Object>();
+
+ try {
+ ogw.startWalking(topNodes, nodeOutput);
+ } catch (SemanticException ex) {
+ throw new RuntimeException(ex);
+ }
+
+ // Filters are not over the rowid, therefore scan everything
+ Object result = nodeOutput.get(both);
+ Assert.assertNull(result);
+ }
+}
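
For reference, the expected ranges in the tests above follow standard Accumulo Range semantics: an inclusive upper bound on a row is expressed as an exclusive bound on that row followed by a null byte, so [f, m] becomes [Key("f"), Key("m\0")). A minimal standalone sketch of that convention using the same public Accumulo client API as the test (illustrative only, not part of this patch):

import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;

public class RangeBoundSketch {
  public static void main(String[] args) {
    // Inclusive [f, m] over rows: end key "m\0" is exclusive but still admits every key in row "m"
    Range fToM = new Range(new Key("f"), true, new Key("m\0"), false);

    System.out.println(fToM.contains(new Key("f")));   // true, start key is inclusive
    System.out.println(fToM.contains(new Key("m")));   // true, row "m" sorts before row "m\0"
    System.out.println(fToM.contains(new Key("m\0"))); // false, end key is exclusive
    System.out.println(fToM.contains(new Key("n")));   // false, past the upper bound
  }
}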
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestPrimitiveComparisonFilter.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestPrimitiveComparisonFilter.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestPrimitiveComparisonFilter.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestPrimitiveComparisonFilter.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.accumulo.predicate;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.io.IntWritable;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ *
+ */
+public class TestPrimitiveComparisonFilter {
+
+ @Test
+ public void testBase64ConstantEncode() {
+ PrimitiveComparisonFilter filter = new PrimitiveComparisonFilter();
+ Map<String,String> options = new HashMap<String,String>();
+
+ for (int i = 0; i < 500; i++) {
+ String constant = Integer.toString(i);
+ options.put(PrimitiveComparisonFilter.CONST_VAL, new String(Base64.encodeBase64(constant.getBytes())));
+
+ Assert.assertEquals(constant, new String(filter.getConstant(options)));
+ }
+ }
+
+ @Test
+ public void testNumericBase64ConstantEncode() throws IOException {
+ PrimitiveComparisonFilter filter = new PrimitiveComparisonFilter();
+ Map<String,String> options = new HashMap<String,String>();
+ IntWritable writable = new IntWritable();
+
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ DataOutputStream out = new DataOutputStream(baos);
+
+ for (int i = 0; i < 500; i++) {
+ writable.set(i);
+ writable.write(out);
+
+ options.put(PrimitiveComparisonFilter.CONST_VAL, new String(Base64.encodeBase64(baos.toByteArray())));
+
+ byte[] bytes = filter.getConstant(options);
+
+ ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+ DataInputStream in = new DataInputStream(bais);
+ writable.readFields(in);
+
+ Assert.assertEquals(i, writable.get());
+
+ baos.reset();
+ }
+ }
+}
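
The two tests above round-trip the filter's comparison constant through the Base64 encoding used for iterator options, once as a plain string and once as a Writable-serialized int. A self-contained sketch of the same encode/decode round trip with commons-codec and Hadoop Writables, without the filter class (illustrative only, not part of this patch):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.io.IntWritable;

public class ConstantEncodingSketch {
  public static void main(String[] args) throws IOException {
    // Writable-serialize an int, then Base64-encode it as it would be stored in iterator options
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    new IntWritable(42).write(new DataOutputStream(baos));
    String optionValue = new String(Base64.encodeBase64(baos.toByteArray()));

    // Decode the option value and read the Writable back out
    byte[] raw = Base64.decodeBase64(optionValue.getBytes());
    IntWritable roundTripped = new IntWritable();
    roundTripped.readFields(new DataInputStream(new ByteArrayInputStream(raw)));
    System.out.println(roundTripped.get()); // prints 42
  }
}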
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestDoubleCompare.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestDoubleCompare.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestDoubleCompare.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestDoubleCompare.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,137 @@
+package org.apache.hadoop.hive.accumulo.predicate.compare;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestDoubleCompare {
+
+ private DoubleCompare doubleCompare;
+
+ @Before
+ public void setup() {
+ doubleCompare = new DoubleCompare();
+ byte[] db = new byte[8];
+ ByteBuffer.wrap(db).putDouble(10.5d);
+ doubleCompare.init(db);
+ }
+
+ public byte[] getBytes(double val) {
+ byte[] dBytes = new byte[8];
+ ByteBuffer.wrap(dBytes).putDouble(val);
+ BigDecimal bd = doubleCompare.serialize(dBytes);
+ assertEquals(bd.doubleValue(), val, 0);
+ return dBytes;
+ }
+
+ @Test
+ public void equal() {
+ Equal equalObj = new Equal(doubleCompare);
+ byte[] val = getBytes(10.5d);
+ assertTrue(equalObj.accept(val));
+ }
+
+ @Test
+ public void notEqual() {
+ NotEqual notEqualObj = new NotEqual(doubleCompare);
+ byte[] val = getBytes(11.0d);
+ assertTrue(notEqualObj.accept(val));
+
+ val = getBytes(10.5d);
+ assertFalse(notEqualObj.accept(val));
+
+ }
+
+ @Test
+ public void greaterThan() {
+ GreaterThan greaterThanObj = new GreaterThan(doubleCompare);
+ byte[] val = getBytes(11.0d);
+
+ assertTrue(greaterThanObj.accept(val));
+
+ val = getBytes(4.5d);
+ assertFalse(greaterThanObj.accept(val));
+
+ val = getBytes(10.5d);
+ assertFalse(greaterThanObj.accept(val));
+ }
+
+ @Test
+ public void greaterThanOrEqual() {
+ GreaterThanOrEqual greaterThanOrEqualObj = new GreaterThanOrEqual(doubleCompare);
+
+ byte[] val = getBytes(11.0d);
+
+ assertTrue(greaterThanOrEqualObj.accept(val));
+
+ val = getBytes(4.0d);
+ assertFalse(greaterThanOrEqualObj.accept(val));
+
+ val = getBytes(10.5d);
+ assertTrue(greaterThanOrEqualObj.accept(val));
+ }
+
+ @Test
+ public void lessThan() {
+
+ LessThan lessThanObj = new LessThan(doubleCompare);
+
+ byte[] val = getBytes(11.0d);
+
+ assertFalse(lessThanObj.accept(val));
+
+ val = getBytes(4.0d);
+ assertTrue(lessThanObj.accept(val));
+
+ val = getBytes(10.5d);
+ assertFalse(lessThanObj.accept(val));
+
+ }
+
+ @Test
+ public void lessThanOrEqual() {
+
+ LessThanOrEqual lessThanOrEqualObj = new LessThanOrEqual(doubleCompare);
+
+ byte[] val = getBytes(11.0d);
+
+ assertFalse(lessThanOrEqualObj.accept(val));
+
+ val = getBytes(4.0d);
+ assertTrue(lessThanOrEqualObj.accept(val));
+
+ val = getBytes(10.5d);
+ assertTrue(lessThanOrEqualObj.accept(val));
+ }
+
+ @Test
+ public void like() {
+ try {
+ Like likeObj = new Like(doubleCompare);
+ assertTrue(likeObj.accept(new byte[] {}));
+ fail("should not accept");
+ } catch (UnsupportedOperationException e) {
+ assertTrue(e.getMessage().contains(
+ "Like not supported for " + doubleCompare.getClass().getName()));
+ }
+ }
+
+ @Test
+ public void invalidSerialization() {
+ try {
+ byte[] badVal = new byte[4];
+ ByteBuffer.wrap(badVal).putInt(1);
+ doubleCompare.serialize(badVal);
+ fail("Should fail");
+ } catch (RuntimeException e) {
+ assertTrue(e.getMessage().contains(" occurred trying to build double value"));
+ }
+ }
+}
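
This test (and the int and long variants that follow) encodes each constant as fixed-width big-endian bytes with ByteBuffer before handing it to the comparison objects. A tiny standalone sketch of that encoding and an equivalent comparison in plain Java (illustrative only, not part of this patch):

import java.nio.ByteBuffer;

public class DoubleEncodingSketch {
  public static void main(String[] args) {
    // Encode the comparison constant the same way the test's setup() does: 8 big-endian bytes
    byte[] constant = new byte[8];
    ByteBuffer.wrap(constant).putDouble(10.5d);

    // Encode a candidate value the same way, then decode both and compare
    byte[] candidate = new byte[8];
    ByteBuffer.wrap(candidate).putDouble(11.0d);

    double constantVal = ByteBuffer.wrap(constant).getDouble();
    double candidateVal = ByteBuffer.wrap(candidate).getDouble();
    System.out.println(Double.compare(candidateVal, constantVal) > 0); // true: 11.0 > 10.5
  }
}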
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestIntCompare.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestIntCompare.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestIntCompare.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestIntCompare.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,123 @@
+package org.apache.hadoop.hive.accumulo.predicate.compare;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.nio.ByteBuffer;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestIntCompare {
+ private IntCompare intCompare;
+
+ @Before
+ public void setup() {
+ byte[] ibytes = new byte[4];
+ ByteBuffer.wrap(ibytes).putInt(10);
+ intCompare = new IntCompare();
+ intCompare.init(ibytes);
+ }
+
+ public byte[] getBytes(int val) {
+ byte[] intBytes = new byte[4];
+ ByteBuffer.wrap(intBytes).putInt(val);
+ int serializedVal = intCompare.serialize(intBytes);
+ assertEquals(serializedVal, val);
+ return intBytes;
+ }
+
+ @Test
+ public void equal() {
+ Equal equalObj = new Equal(intCompare);
+ byte[] val = getBytes(10);
+ assertTrue(equalObj.accept(val));
+ }
+
+ @Test
+ public void notEqual() {
+ NotEqual notEqualObj = new NotEqual(intCompare);
+ byte[] val = getBytes(11);
+ assertTrue(notEqualObj.accept(val));
+
+ val = getBytes(10);
+ assertFalse(notEqualObj.accept(val));
+
+ }
+
+ @Test
+ public void greaterThan() {
+ GreaterThan greaterThanObj = new GreaterThan(intCompare);
+ byte[] val = getBytes(11);
+
+ assertTrue(greaterThanObj.accept(val));
+
+ val = getBytes(4);
+ assertFalse(greaterThanObj.accept(val));
+
+ val = getBytes(10);
+ assertFalse(greaterThanObj.accept(val));
+ }
+
+ @Test
+ public void greaterThanOrEqual() {
+ GreaterThanOrEqual greaterThanOrEqualObj = new GreaterThanOrEqual(intCompare);
+
+ byte[] val = getBytes(11);
+
+ assertTrue(greaterThanOrEqualObj.accept(val));
+
+ val = getBytes(4);
+ assertFalse(greaterThanOrEqualObj.accept(val));
+
+ val = getBytes(10);
+ assertTrue(greaterThanOrEqualObj.accept(val));
+ }
+
+ @Test
+ public void lessThan() {
+
+ LessThan lessThanObj = new LessThan(intCompare);
+
+ byte[] val = getBytes(11);
+
+ assertFalse(lessThanObj.accept(val));
+
+ val = getBytes(4);
+ assertTrue(lessThanObj.accept(val));
+
+ val = getBytes(10);
+ assertFalse(lessThanObj.accept(val));
+
+ }
+
+ @Test
+ public void lessThanOrEqual() {
+
+ LessThanOrEqual lessThanOrEqualObj = new LessThanOrEqual(intCompare);
+
+ byte[] val = getBytes(11);
+
+ assertFalse(lessThanOrEqualObj.accept(val));
+
+ val = getBytes(4);
+ assertTrue(lessThanOrEqualObj.accept(val));
+
+ val = getBytes(10);
+ assertTrue(lessThanOrEqualObj.accept(val));
+ }
+
+ @Test
+ public void like() {
+ try {
+ Like likeObj = new Like(intCompare);
+ assertTrue(likeObj.accept(new byte[] {}));
+ fail("should not accept");
+ } catch (UnsupportedOperationException e) {
+ assertTrue(e.getMessage().contains(
+ "Like not supported for " + intCompare.getClass().getName()));
+ }
+ }
+}
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestLongComparison.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestLongComparison.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestLongComparison.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestLongComparison.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,136 @@
+package org.apache.hadoop.hive.accumulo.predicate.compare;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.nio.ByteBuffer;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestLongComparison {
+
+ private LongCompare longComp;
+
+ @Before
+ public void setup() {
+ byte[] lBytes = new byte[8];
+ ByteBuffer.wrap(lBytes).putLong(10l);
+ longComp = new LongCompare();
+ longComp.init(lBytes);
+ }
+
+ public byte[] getBytes(long val) {
+ byte[] lonBytes = new byte[8];
+ ByteBuffer.wrap(lonBytes).putLong(val);
+ long lon = longComp.serialize(lonBytes);
+ assertEquals(lon, val);
+ return lonBytes;
+ }
+
+ @Test
+ public void equal() {
+ Equal equalObj = new Equal(longComp);
+ byte[] val = getBytes(10l);
+ assertTrue(equalObj.accept(val));
+ }
+
+ @Test
+ public void notEqual() {
+ NotEqual notEqualObj = new NotEqual(longComp);
+ byte[] val = getBytes(11l);
+ assertTrue(notEqualObj.accept(val));
+
+ val = getBytes(10l);
+ assertFalse(notEqualObj.accept(val));
+
+ }
+
+ @Test
+ public void greaterThan() {
+ GreaterThan greaterThanObj = new GreaterThan(longComp);
+ byte[] val = getBytes(11l);
+
+ assertTrue(greaterThanObj.accept(val));
+
+ val = getBytes(4l);
+ assertFalse(greaterThanObj.accept(val));
+
+ val = getBytes(10l);
+ assertFalse(greaterThanObj.accept(val));
+ }
+
+ @Test
+ public void greaterThanOrEqual() {
+ GreaterThanOrEqual greaterThanOrEqualObj = new GreaterThanOrEqual(longComp);
+
+ byte[] val = getBytes(11l);
+
+ assertTrue(greaterThanOrEqualObj.accept(val));
+
+ val = getBytes(4l);
+ assertFalse(greaterThanOrEqualObj.accept(val));
+
+ val = getBytes(10l);
+ assertTrue(greaterThanOrEqualObj.accept(val));
+ }
+
+ @Test
+ public void lessThan() {
+
+ LessThan lessThanObj = new LessThan(longComp);
+
+ byte[] val = getBytes(11l);
+
+ assertFalse(lessThanObj.accept(val));
+
+ val = getBytes(4l);
+ assertTrue(lessThanObj.accept(val));
+
+ val = getBytes(10l);
+ assertFalse(lessThanObj.accept(val));
+
+ }
+
+ @Test
+ public void lessThanOrEqual() {
+
+ LessThanOrEqual lessThanOrEqualObj = new LessThanOrEqual(longComp);
+
+ byte[] val = getBytes(11l);
+
+ assertFalse(lessThanOrEqualObj.accept(val));
+
+ val = getBytes(4l);
+ assertTrue(lessThanOrEqualObj.accept(val));
+
+ val = getBytes(10l);
+ assertTrue(lessThanOrEqualObj.accept(val));
+ }
+
+ @Test
+ public void like() {
+ try {
+ Like likeObj = new Like(longComp);
+ assertTrue(likeObj.accept(new byte[] {}));
+ fail("should not accept");
+ } catch (UnsupportedOperationException e) {
+ assertTrue(e.getMessage().contains("Like not supported for " + longComp.getClass().getName()));
+ }
+ }
+
+ @Test
+ public void invalidSerialization() {
+ try {
+ byte[] badVal = new byte[4];
+ ByteBuffer.wrap(badVal).putInt(1);
+ longComp.serialize(badVal);
+ fail("Should fail");
+ } catch (RuntimeException e) {
+ assertTrue(e.getMessage().contains(" occurred trying to build long value"));
+ }
+ }
+
+}
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestStringCompare.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestStringCompare.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestStringCompare.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestStringCompare.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,122 @@
+package org.apache.hadoop.hive.accumulo.predicate.compare;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestStringCompare {
+
+ private StringCompare strCompare;
+
+ @Before
+ public void setup() {
+ strCompare = new StringCompare();
+ strCompare.init("aaa".getBytes());
+ }
+
+ @Test
+ public void equal() {
+ Equal equalObj = new Equal(strCompare);
+ byte[] val = "aaa".getBytes();
+ assertTrue(equalObj.accept(val));
+ }
+
+ @Test
+ public void notEqual() {
+ NotEqual notEqualObj = new NotEqual(strCompare);
+ byte[] val = "aab".getBytes();
+ assertTrue(notEqualObj.accept(val));
+
+ val = "aaa".getBytes();
+ assertFalse(notEqualObj.accept(val));
+
+ }
+
+ @Test
+ public void greaterThan() {
+ GreaterThan greaterThanObj = new GreaterThan(strCompare);
+ byte[] val = "aab".getBytes();
+
+ assertTrue(greaterThanObj.accept(val));
+
+ val = "aa".getBytes();
+ assertFalse(greaterThanObj.accept(val));
+
+ val = "aaa".getBytes();
+ assertFalse(greaterThanObj.accept(val));
+ }
+
+ @Test
+ public void greaterThanOrEqual() {
+ GreaterThanOrEqual greaterThanOrEqualObj = new GreaterThanOrEqual(strCompare);
+ byte[] val = "aab".getBytes();
+
+ assertTrue(greaterThanOrEqualObj.accept(val));
+
+ val = "aa".getBytes();
+ assertFalse(greaterThanOrEqualObj.accept(val));
+
+ val = "aaa".getBytes();
+ assertTrue(greaterThanOrEqualObj.accept(val));
+ }
+
+ @Test
+ public void lessThan() {
+
+ LessThan lessThanObj = new LessThan(strCompare);
+
+ byte[] val = "aab".getBytes();
+
+ assertFalse(lessThanObj.accept(val));
+
+ val = "aa".getBytes();
+ assertTrue(lessThanObj.accept(val));
+
+ val = "aaa".getBytes();
+ assertFalse(lessThanObj.accept(val));
+
+ }
+
+ @Test
+ public void lessThanOrEqual() {
+
+ LessThanOrEqual lessThanOrEqualObj = new LessThanOrEqual(strCompare);
+
+ byte[] val = "aab".getBytes();
+
+ assertFalse(lessThanOrEqualObj.accept(val));
+
+ val = "aa".getBytes();
+ assertTrue(lessThanOrEqualObj.accept(val));
+
+ val = "aaa".getBytes();
+ assertTrue(lessThanOrEqualObj.accept(val));
+ }
+
+ @Test
+ public void like() {
+ Like likeObj = new Like(strCompare);
+ String condition = "%a";
+ assertTrue(likeObj.accept(condition.getBytes()));
+
+ condition = "%a%";
+ assertTrue(likeObj.accept(condition.getBytes()));
+
+ condition = "a%";
+ assertTrue(likeObj.accept(condition.getBytes()));
+
+ condition = "a%aa";
+ assertFalse(likeObj.accept(condition.getBytes()));
+
+ condition = "b%";
+ assertFalse(likeObj.accept(condition.getBytes()));
+
+ condition = "%ab%";
+ assertFalse(likeObj.accept(condition.getBytes()));
+
+ condition = "%ba";
+ assertFalse(likeObj.accept(condition.getBytes()));
+ }
+}
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.accumulo.serde;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.hadoop.hive.accumulo.columns.ColumnEncoding;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
+import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.log4j.Logger;
+
+/**
+ * Example AccumuloRowIdFactory which accepts a delimiter used to separate the components of a
+ * struct that is stored in the rowId.
+ */
+public class DelimitedAccumuloRowIdFactory extends DefaultAccumuloRowIdFactory {
+ private static final Logger log = Logger.getLogger(DelimitedAccumuloRowIdFactory.class);
+ public static final String ACCUMULO_COMPOSITE_DELIMITER = "accumulo.composite.delimiter";
+
+ private byte separator;
+
+ @Override
+ public void init(AccumuloSerDeParameters accumuloSerDeParams, Properties properties)
+ throws SerDeException {
+ super.init(accumuloSerDeParams, properties);
+
+ String delimiter = properties.getProperty(ACCUMULO_COMPOSITE_DELIMITER);
+ if (null == delimiter || delimiter.isEmpty()) {
+ throw new SerDeException("Did not find expected delimiter in configuration: "
+ + ACCUMULO_COMPOSITE_DELIMITER);
+ }
+
+ if (delimiter.length() != 1) {
+ log.warn("Configured delimiter is longer than one character, only using first character");
+ }
+
+ separator = (byte) delimiter.charAt(0);
+
+ log.info("Initialized DelimitedAccumuloRowIdFactory with separator of '" + separator + "'");
+ }
+
+ @Override
+ public ObjectInspector createRowIdObjectInspector(TypeInfo type) throws SerDeException {
+ return LazyFactory.createLazyObjectInspector(type, new byte[] {separator}, 0,
+ serdeParams.getNullSequence(), serdeParams.isEscaped(), serdeParams.getEscapeChar());
+ }
+
+ @Override
+ public LazyObjectBase createRowId(ObjectInspector inspector) throws SerDeException {
+ LazyObjectBase lazyObj = LazyFactory.createLazyObject(inspector,
+ ColumnEncoding.BINARY == rowIdMapping.getEncoding());
+ log.info("Created " + lazyObj.getClass() + " for rowId with inspector " + inspector.getClass());
+ return lazyObj;
+ }
+
+ @Override
+ public byte[] serializeRowId(Object object, StructField field, ByteStream.Output output)
+ throws IOException {
+ ObjectInspector inspector = field.getFieldObjectInspector();
+ if (inspector.getCategory() != ObjectInspector.Category.STRUCT) {
+ throw new IllegalStateException("invalid type value " + inspector.getTypeName());
+ }
+
+ output.reset();
+
+ StructObjectInspector structOI = (StructObjectInspector) inspector;
+ List<Object> elements = structOI.getStructFieldsDataAsList(object);
+ List<? extends StructField> fields = structOI.getAllStructFieldRefs();
+ for (int i = 0; i < elements.size(); i++) {
+ Object o = elements.get(i);
+ StructField structField = fields.get(i);
+
+ if (output.getLength() > 0) {
+ output.write(separator);
+ }
+
+ serializer.writeWithLevel(structField.getFieldObjectInspector(), o, output, rowIdMapping, 1);
+ }
+
+ return output.toByteArray();
+ }
+}
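
serializeRowId() above joins the struct's serialized fields with the single configured separator byte, writing the separator only between components. A hypothetical, dependency-free sketch of that joining step, assuming field values that have already been serialized to strings (illustrative only, not part of this patch):

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class DelimitedRowIdSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical pre-serialized struct components and a '|' delimiter
    // (the first character of accumulo.composite.delimiter)
    String[] fields = {"part1", "part2", "part3"};
    byte separator = (byte) '|';

    ByteArrayOutputStream rowId = new ByteArrayOutputStream();
    for (String field : fields) {
      if (rowId.size() > 0) {
        rowId.write(separator); // separator only between components, as in serializeRowId()
      }
      rowId.write(field.getBytes());
    }
    System.out.println(rowId.toString()); // part1|part2|part3
  }
}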
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.accumulo.serde;
+
+import java.util.Arrays;
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.log4j.Logger;
+
+/**
+ * Gets the first character of each string in a struct
+ */
+public class FirstCharAccumuloCompositeRowId extends AccumuloCompositeRowId {
+ private static final Logger log = Logger.getLogger(FirstCharAccumuloCompositeRowId.class);
+
+ private Properties tbl;
+ private Configuration conf;
+ private byte[] bytes;
+ private int start, length;
+ private String bytesAsString;
+
+ public FirstCharAccumuloCompositeRowId(LazySimpleStructObjectInspector oi, Properties tbl,
+ Configuration conf) {
+ super(oi);
+ this.tbl = tbl;
+ this.conf = conf;
+ }
+
+ @Override
+ public void init(ByteArrayRef bytes, int start, int length) {
+ this.bytes = bytes.getData();
+ this.start = start;
+ this.length = length;
+ }
+
+ @Override
+ public Object getField(int fieldID) {
+ if (bytesAsString == null) {
+ this.bytesAsString = new String(bytes, start, length);
+ }
+
+ log.info("Data: " + bytesAsString + ", " + Arrays.toString(bytes));
+
+ // The separator for the Hive row is \x02, so the separator for this struct is
+ // \x02 + 1 = \x03
+ char separator = (char) ((int) oi.getSeparator() + 1);
+
+ log.info("Separator: " + String.format("%04x", (int) separator));
+
+ // Get the character/byte at the offset in the string equal to the fieldID
+ String[] fieldBytes = StringUtils.split(bytesAsString, separator);
+
+ log.info("Fields: " + Arrays.toString(fieldBytes));
+
+ return toLazyObject(fieldID, new byte[] {(byte) fieldBytes[fieldID].charAt(0)});
+ }
+}
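
getField() above relies on Hive's nested-separator convention: the row-level separator is \x02, so the struct one level below it is delimited with the next control character, \x03. A small standalone sketch of that split-and-take-first-character step (illustrative only, not part of this patch):

import java.util.Arrays;

public class FirstCharSketch {
  public static void main(String[] args) {
    // Row separator is \x02, so the nested struct uses \x02 + 1 = \x03
    char rowSeparator = '\u0002';
    char structSeparator = (char) (rowSeparator + 1);

    // Hypothetical serialized struct: two string fields joined by the struct separator
    String serialized = "alpha" + structSeparator + "bravo";

    String[] fields = serialized.split(String.valueOf(structSeparator));
    System.out.println(Arrays.toString(fields)); // [alpha, bravo]
    System.out.println(fields[1].charAt(0));     // 'b', the byte getField(1) would wrap lazily
  }
}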
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloRowSerializer.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloRowSerializer.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloRowSerializer.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloRowSerializer.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,311 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.accumulo.serde;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.accumulo.core.data.ColumnUpdate;
+import org.apache.accumulo.core.data.Mutation;
+import org.apache.accumulo.core.security.ColumnVisibility;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.accumulo.columns.ColumnEncoding;
+import org.apache.hadoop.hive.accumulo.columns.ColumnMapping;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.Text;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.google.common.base.Joiner;
+
+/**
+ *
+ */
+public class TestAccumuloRowSerializer {
+
+ @Test
+ public void testBufferResetBeforeUse() throws IOException {
+ ByteStream.Output output = new ByteStream.Output();
+ PrimitiveObjectInspector fieldObjectInspector = Mockito.mock(StringObjectInspector.class);
+ ColumnMapping mapping = Mockito.mock(ColumnMapping.class);
+
+ // Write some garbage to the buffer that should be erased
+ output.write("foobar".getBytes());
+
+ // Stub out the serializer
+ AccumuloRowSerializer serializer = Mockito.mock(AccumuloRowSerializer.class);
+
+ String object = "hello";
+
+ Mockito.when(
+ serializer.getSerializedValue(Mockito.any(ObjectInspector.class), Mockito.any(),
+ Mockito.any(ByteStream.Output.class), Mockito.any(ColumnMapping.class)))
+ .thenCallRealMethod();
+
+ Mockito.when(fieldObjectInspector.getCategory()).thenReturn(ObjectInspector.Category.PRIMITIVE);
+ Mockito.when(fieldObjectInspector.getPrimitiveCategory()).thenReturn(PrimitiveCategory.STRING);
+ Mockito.when(fieldObjectInspector.getPrimitiveWritableObject(Mockito.any(Object.class)))
+ .thenReturn(new Text(object));
+ Mockito.when(mapping.getEncoding()).thenReturn(ColumnEncoding.STRING);
+
+ // Invoke the method
+ serializer.getSerializedValue(fieldObjectInspector, object, output, mapping);
+
+ // Verify the buffer was reset (real output doesn't happen because it was mocked)
+ Assert.assertEquals(0, output.size());
+ }
+
+ @Test
+ public void testBinarySerialization() throws IOException, SerDeException {
+ List<String> columns = Arrays.asList("row", "cq1", "cq2", "cq3");
+ List<TypeInfo> types = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo,
+ TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
+ List<String> typeNames = new ArrayList<String>(types.size());
+ for (TypeInfo type : types) {
+ typeNames.add(type.getTypeName());
+ }
+
+ Properties tableProperties = new Properties();
+ tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,
+ ":rowid,cf:cq1#b,cf:cq2#b,cf:cq3");
+ tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
+ tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
+ tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
+ AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
+ tableProperties, AccumuloSerDe.class.getSimpleName());
+ SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+
+ LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory
+ .createLazyStructInspector(columns, types, serDeParams.getSeparators(),
+ serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(),
+ serDeParams.isEscaped(), serDeParams.getEscapeChar());
+
+ AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
+ accumuloSerDeParams.getColumnMappings(), new ColumnVisibility(),
+ accumuloSerDeParams.getRowIdFactory());
+
+ // Create the LazyStruct from the LazyStruct...Inspector
+ LazyStruct obj = (LazyStruct) LazyFactory.createLazyObject(oi);
+
+ ByteArrayRef byteRef = new ByteArrayRef();
+ byteRef.setData(new byte[] {'r', 'o', 'w', '1', ' ', '1', '0', ' ', '2', '0', ' ', 'v', 'a',
+ 'l', 'u', 'e'});
+ obj.init(byteRef, 0, byteRef.getData().length);
+
+ Mutation m = (Mutation) serializer.serialize(obj, oi);
+
+ Assert.assertArrayEquals("row1".getBytes(), m.getRow());
+
+ List<ColumnUpdate> updates = m.getUpdates();
+ Assert.assertEquals(3, updates.size());
+
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ DataOutputStream out = new DataOutputStream(baos);
+
+ ColumnUpdate update = updates.get(0);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq1", new String(update.getColumnQualifier()));
+
+ out.writeInt(10);
+ Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
+
+ update = updates.get(1);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq2", new String(update.getColumnQualifier()));
+
+ baos.reset();
+ out.writeInt(20);
+ Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
+
+ update = updates.get(2);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq3", new String(update.getColumnQualifier()));
+
+ Assert.assertEquals("value", new String(update.getValue()));
+ }
+
+ @Test
+ public void testVisibilityLabel() throws IOException, SerDeException {
+ List<String> columns = Arrays.asList("row", "cq1", "cq2", "cq3");
+ List<TypeInfo> types = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo,
+ TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
+ List<String> typeNames = new ArrayList<String>(types.size());
+ for (TypeInfo type : types) {
+ typeNames.add(type.getTypeName());
+ }
+
+ Properties tableProperties = new Properties();
+ tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS,
+ ":rowid,cf:cq1#b,cf:cq2#b,cf:cq3");
+ tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
+ tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
+ tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
+ AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
+ tableProperties, AccumuloSerDe.class.getSimpleName());
+ SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+
+ LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory
+ .createLazyStructInspector(columns, types, serDeParams.getSeparators(),
+ serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(),
+ serDeParams.isEscaped(), serDeParams.getEscapeChar());
+
+ AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
+ accumuloSerDeParams.getColumnMappings(), new ColumnVisibility("foo"),
+ accumuloSerDeParams.getRowIdFactory());
+
+ // Create the LazyStruct from the LazyStruct...Inspector
+ LazyStruct obj = (LazyStruct) LazyFactory.createLazyObject(oi);
+
+ ByteArrayRef byteRef = new ByteArrayRef();
+ byteRef.setData(new byte[] {'r', 'o', 'w', '1', ' ', '1', '0', ' ', '2', '0', ' ', 'v', 'a',
+ 'l', 'u', 'e'});
+ obj.init(byteRef, 0, byteRef.getData().length);
+
+ Mutation m = (Mutation) serializer.serialize(obj, oi);
+
+ Assert.assertArrayEquals("row1".getBytes(), m.getRow());
+
+ List<ColumnUpdate> updates = m.getUpdates();
+ Assert.assertEquals(3, updates.size());
+
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ DataOutputStream out = new DataOutputStream(baos);
+
+ ColumnUpdate update = updates.get(0);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq1", new String(update.getColumnQualifier()));
+ Assert.assertEquals("foo", new String(update.getColumnVisibility()));
+
+ out.writeInt(10);
+ Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
+
+ update = updates.get(1);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq2", new String(update.getColumnQualifier()));
+ Assert.assertEquals("foo", new String(update.getColumnVisibility()));
+
+ baos.reset();
+ out.writeInt(20);
+ Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
+
+ update = updates.get(2);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq3", new String(update.getColumnQualifier()));
+ Assert.assertEquals("foo", new String(update.getColumnVisibility()));
+
+ Assert.assertEquals("value", new String(update.getValue()));
+ }
+
+ @Test
+ public void testMapSerialization() throws IOException, SerDeException {
+ List<String> columns = Arrays.asList("row", "col");
+ List<TypeInfo> types = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory
+ .getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo));
+ List<String> typeNames = new ArrayList<String>(types.size());
+ for (TypeInfo type : types) {
+ typeNames.add(type.getTypeName());
+ }
+
+ Properties tableProperties = new Properties();
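+ // cf:* maps the whole column family to the Hive map column; each map entry is written as
+ // its own column qualifier/value pair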
+ tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:*");
+ tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
+ tableProperties.setProperty(serdeConstants.COLLECTION_DELIM, ",");
+ tableProperties.setProperty(serdeConstants.MAPKEY_DELIM, ":");
+ tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
+ tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
+ AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
+ tableProperties, AccumuloSerDe.class.getSimpleName());
+ SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+
+ TypeInfo stringTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
+ LazyStringObjectInspector stringOI = (LazyStringObjectInspector) LazyFactory
+ .createLazyObjectInspector(stringTypeInfo, new byte[] {0}, 0,
+ serDeParams.getNullSequence(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
+
+ LazyMapObjectInspector mapOI = LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(
+ stringOI, stringOI, (byte) ',', (byte) ':', serDeParams.getNullSequence(),
+ serDeParams.isEscaped(), serDeParams.getEscapeChar());
+
+ LazySimpleStructObjectInspector structOI = (LazySimpleStructObjectInspector) LazyObjectInspectorFactory
+ .getLazySimpleStructObjectInspector(columns, Arrays.asList(stringOI, mapOI), (byte) ' ',
+ serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(),
+ serDeParams.isEscaped(), serDeParams.getEscapeChar());
+
+ AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
+ accumuloSerDeParams.getColumnMappings(), new ColumnVisibility(),
+ accumuloSerDeParams.getRowIdFactory());
+
+ // Create the LazyStruct from the LazyStruct...Inspector
+ LazyStruct obj = (LazyStruct) LazyFactory.createLazyObject(structOI);
+
+ ByteArrayRef byteRef = new ByteArrayRef();
+ byteRef.setData("row1 cq1:10,cq2:20,cq3:value".getBytes());
+ obj.init(byteRef, 0, byteRef.getData().length);
+
+ Mutation m = (Mutation) serializer.serialize(obj, structOI);
+
+ Assert.assertArrayEquals("row1".getBytes(), m.getRow());
+
+ List<ColumnUpdate> updates = m.getUpdates();
+ Assert.assertEquals(3, updates.size());
+
+ ColumnUpdate update = updates.get(0);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq1", new String(update.getColumnQualifier()));
+ Assert.assertEquals("10", new String(update.getValue()));
+
+ update = updates.get(1);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq2", new String(update.getColumnQualifier()));
+ Assert.assertEquals("20", new String(update.getValue()));
+
+ update = updates.get(2);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq3", new String(update.getColumnQualifier()));
+ Assert.assertEquals("value", new String(update.getValue()));
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testInvalidRowIdOffset() throws SerDeException {
+ ArrayList<ColumnMapping> mappings = new ArrayList<ColumnMapping>();
+
+ // Should fail because of the -1
+ new AccumuloRowSerializer(-1, null, mappings, new ColumnVisibility(), null);
+ }
+}
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,467 @@
+package org.apache.hadoop.hive.accumulo.serde;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.accumulo.core.data.ColumnUpdate;
+import org.apache.accumulo.core.data.Mutation;
+import org.apache.accumulo.core.security.ColumnVisibility;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.accumulo.AccumuloHiveRow;
+import org.apache.hadoop.hive.accumulo.LazyAccumuloRow;
+import org.apache.hadoop.hive.accumulo.columns.InvalidColumnMappingException;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyArray;
+import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
+import org.apache.hadoop.hive.serde2.lazy.LazyMap;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazyString;
+import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.Text;
+import org.apache.log4j.Logger;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.base.Joiner;
+
+public class TestAccumuloSerDe {
+ @SuppressWarnings("unused")
+ private static final Logger log = Logger.getLogger(TestAccumuloSerDe.class);
+
+ protected AccumuloSerDe serde;
+
+ @Before
+ public void setup() {
+ serde = new AccumuloSerDe();
+ }
+
+ @Test(expected = TooManyHiveColumnsException.class)
+ public void moreHiveColumnsThanAccumuloColumns() throws Exception {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:f3");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "row,field1,field2,field3,field4");
+ properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,string,string,string,string");
+
+ serde.initialize(conf, properties);
+ serde.deserialize(new Text("fail"));
+ }
+
+ @Test(expected = TooManyAccumuloColumnsException.class)
+ public void moreAccumuloColumnsThanHiveColumns() throws Exception {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:f1,cf:f2,cf:f3");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "row,field1,field2");
+ properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,string,string");
+
+ serde.initialize(conf, properties);
+ serde.deserialize(new Text("fail"));
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void emptyConfiguration() throws SerDeException {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+ serde.initialize(conf, properties);
+ }
+
+ @Test
+ public void simpleColumnMapping() throws SerDeException {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:f1,cf:f2,cf:f3");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "row,field1,field2,field3");
+
+ serde.initialize(conf, properties);
+ assertNotNull(serde.getCachedRow());
+ }
+
+ @Test
+ public void withRowID() throws SerDeException {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:f1,:rowID,cf:f2,cf:f3");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "field1,field2,field3,field4");
+ serde.initialize(conf, properties);
+ assertNotNull(serde.getCachedRow());
+ }
+
+ @Test(expected = InvalidColumnMappingException.class)
+ public void invalidColMapping() throws Exception {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf,cf:f2,cf:f3");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "field2,field3,field4");
+
+ serde.initialize(conf, properties);
+ AccumuloHiveRow row = new AccumuloHiveRow();
+ row.setRowId("r1");
+ Object obj = serde.deserialize(row);
+ assertTrue(obj instanceof LazyAccumuloRow);
+ LazyAccumuloRow lazyRow = (LazyAccumuloRow) obj;
+ lazyRow.getField(0);
+ }
+
+ @Test(expected = TooManyAccumuloColumnsException.class)
+ public void deserializeWithTooFewHiveColumns() throws Exception {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:f1,cf:f2,cf:f3");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "row,col1,col2");
+ properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,string,string");
+
+ serde.initialize(conf, properties);
+ serde.deserialize(new Text("fail"));
+ }
+
+ @Test
+ public void testArraySerialization() throws Exception {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:vals");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "row,values");
+ properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,array<string>");
+ properties.setProperty(serdeConstants.COLLECTION_DELIM, ":");
+
+ // Use the same collection delimiter that was configured above via COLLECTION_DELIM
+ char separator = ':';
+
+ serde.initialize(conf, properties);
+
+ AccumuloHiveRow row = new AccumuloHiveRow();
+ row.setRowId("r1");
+ row.add("cf", "vals", ("value1" + separator + "value2" + separator + "value3").getBytes());
+
+ Object obj = serde.deserialize(row);
+
+ assertNotNull(obj);
+ assertTrue(obj instanceof LazyAccumuloRow);
+
+ LazyAccumuloRow lazyRow = (LazyAccumuloRow) obj;
+ Object field0 = lazyRow.getField(0);
+ assertNotNull(field0);
+ assertTrue(field0 instanceof LazyString);
+ assertEquals(row.getRowId(), ((LazyString) field0).getWritableObject().toString());
+
+ Object field1 = lazyRow.getField(1);
+ assertNotNull(field1);
+ assertTrue(field1 instanceof LazyArray);
+ LazyArray array = (LazyArray) field1;
+
+ List<Object> values = array.getList();
+ assertEquals(3, values.size());
+ for (int i = 0; i < 3; i++) {
+ Object o = values.get(i);
+ assertNotNull(o);
+ assertTrue(o instanceof LazyString);
+ assertEquals("value" + (i + 1), ((LazyString) o).getWritableObject().toString());
+ }
+ }
+
+ @Test
+ public void testMapSerialization() throws Exception {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:vals");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "row,values");
+ properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,map<string,string>");
+ properties.setProperty(serdeConstants.COLLECTION_DELIM, ":");
+ properties.setProperty(serdeConstants.MAPKEY_DELIM, "=");
+
+ // Use the same collection and map-key delimiters that were configured above
+ char collectionSeparator = ':', kvSeparator = '=';
+
+ serde.initialize(conf, properties);
+
+ AccumuloHiveRow row = new AccumuloHiveRow();
+ row.setRowId("r1");
+ row.add("cf", "vals", ("k1" + kvSeparator + "v1" + collectionSeparator + "k2" + kvSeparator
+ + "v2" + collectionSeparator + "k3" + kvSeparator + "v3").getBytes());
+
+ Object obj = serde.deserialize(row);
+
+ assertNotNull(obj);
+ assertTrue(obj instanceof LazyAccumuloRow);
+
+ LazyAccumuloRow lazyRow = (LazyAccumuloRow) obj;
+ Object field0 = lazyRow.getField(0);
+ assertNotNull(field0);
+ assertTrue(field0 instanceof LazyString);
+ assertEquals(row.getRowId(), ((LazyString) field0).getWritableObject().toString());
+
+ Object field1 = lazyRow.getField(1);
+ assertNotNull(field1);
+ assertTrue(field1 instanceof LazyMap);
+ LazyMap map = (LazyMap) field1;
+
+ Map<Object,Object> untypedMap = map.getMap();
+ assertEquals(3, map.getMapSize());
+ Set<String> expectedKeys = new HashSet<String>();
+ expectedKeys.add("k1");
+ expectedKeys.add("k2");
+ expectedKeys.add("k3");
+ for (Entry<Object,Object> entry : untypedMap.entrySet()) {
+ assertNotNull(entry.getKey());
+ assertTrue(entry.getKey() instanceof LazyString);
+ LazyString key = (LazyString) entry.getKey();
+
+ assertNotNull(entry.getValue());
+ assertTrue(entry.getValue() instanceof LazyString);
+ LazyString value = (LazyString) entry.getValue();
+
+ String strKey = key.getWritableObject().toString(), strValue = value.getWritableObject()
+ .toString();
+
+ assertTrue(expectedKeys.remove(strKey));
+
+ assertEquals(2, strValue.length());
+ assertTrue(strValue.startsWith("v"));
+ assertTrue(strValue.endsWith(Character.toString(strKey.charAt(1))));
+ }
+
+ assertTrue("Did not find expected keys: " + expectedKeys, expectedKeys.isEmpty());
+ }
+
+ @Test
+ public void deserialization() throws Exception {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:f1,cf:f2,cf:f3");
+
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "blah,field2,field3,field4");
+ serde.initialize(conf, properties);
+
+ AccumuloHiveRow row = new AccumuloHiveRow();
+ row.setRowId("r1");
+ row.add("cf", "f1", "v1".getBytes());
+ row.add("cf", "f2", "v2".getBytes());
+
+ Object obj = serde.deserialize(row);
+ assertTrue(obj instanceof LazyAccumuloRow);
+
+ LazyAccumuloRow lazyRow = (LazyAccumuloRow) obj;
+ Object field0 = lazyRow.getField(0);
+ assertNotNull(field0);
+ assertTrue(field0 instanceof LazyString);
+ assertEquals("r1", field0.toString());
+
+ Object field1 = lazyRow.getField(1);
+ assertNotNull(field1);
+ assertTrue("Expected instance of LazyString but was " + field1.getClass(),
+ field1 instanceof LazyString);
+ assertEquals("v1", field1.toString());
+
+ Object field2 = lazyRow.getField(2);
+ assertNotNull(field2);
+ assertTrue(field2 instanceof LazyString);
+ assertEquals("v2", field2.toString());
+ }
+
+ @Test
+ public void testNoVisibilitySetsEmptyVisibility() throws SerDeException {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:f1,:rowID");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "field1,field2");
+
+ serde.initialize(conf, properties);
+
+ AccumuloRowSerializer serializer = serde.getSerializer();
+
+ Assert.assertEquals(new ColumnVisibility(), serializer.getVisibility());
+ }
+
+ @Test
+ public void testColumnVisibilityForSerializer() throws SerDeException {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:f1,:rowID");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "field1,field2");
+ properties.setProperty(AccumuloSerDeParameters.VISIBILITY_LABEL_KEY, "foobar");
+
+ serde.initialize(conf, properties);
+
+ AccumuloRowSerializer serializer = serde.getSerializer();
+
+ Assert.assertEquals(new ColumnVisibility("foobar"), serializer.getVisibility());
+ }
+
+ @Test
+ public void testCompositeKeyDeserialization() throws Exception {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:f1");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "row,field1");
+ properties.setProperty(serdeConstants.LIST_COLUMN_TYPES,
+ "struct<col1:string,col2:string,col3:string>,string");
+ properties.setProperty(DelimitedAccumuloRowIdFactory.ACCUMULO_COMPOSITE_DELIMITER, "_");
+ properties.setProperty(AccumuloSerDeParameters.COMPOSITE_ROWID_FACTORY,
+ DelimitedAccumuloRowIdFactory.class.getName());
+
+ serde.initialize(conf, properties);
+
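+ // The row id "p1_p2_p3" is split on the configured "_" delimiter into the three string
+ // fields of the row struct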
+ AccumuloHiveRow row = new AccumuloHiveRow();
+ row.setRowId("p1_p2_p3");
+ row.add("cf", "f1", "v1".getBytes());
+
+ Object obj = serde.deserialize(row);
+ assertTrue(obj instanceof LazyAccumuloRow);
+
+ LazyAccumuloRow lazyRow = (LazyAccumuloRow) obj;
+ Object field0 = lazyRow.getField(0);
+ assertNotNull(field0);
+ assertTrue(field0 instanceof LazyStruct);
+ LazyStruct struct = (LazyStruct) field0;
+ List<Object> fields = struct.getFieldsAsList();
+ assertEquals(3, fields.size());
+ for (int i = 0; i < fields.size(); i++) {
+ assertEquals(LazyString.class, fields.get(i).getClass());
+ assertEquals("p" + (i + 1), fields.get(i).toString());
+ }
+
+ Object field1 = lazyRow.getField(1);
+ assertNotNull(field1);
+ assertTrue("Expected instance of LazyString but was " + field1.getClass(),
+ field1 instanceof LazyString);
+ assertEquals("v1", field1.toString());
+ }
+
+ @Test
+ public void testStructOfMapSerialization() throws IOException, SerDeException {
+ List<String> columns = Arrays.asList("row", "col");
+ List<String> structColNames = Arrays.asList("map1", "map2");
+ TypeInfo mapTypeInfo = TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo,
+ TypeInfoFactory.stringTypeInfo);
+
+ // struct<map1:map<string,string>,map2:map<string,string>>,string
+ List<TypeInfo> types = Arrays.<TypeInfo> asList(
+ TypeInfoFactory.getStructTypeInfo(structColNames, Arrays.asList(mapTypeInfo, mapTypeInfo)),
+ TypeInfoFactory.stringTypeInfo);
+
+ Properties tableProperties = new Properties();
+ tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:cq");
+ // No delimiters are configured, so the default separators are used
+ // (bytes \x01 through \x08 at indices 0 through 7)
+ tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
+ tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
+ AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
+ tableProperties, AccumuloSerDe.class.getSimpleName());
+ SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+
+ byte[] seps = serDeParams.getSeparators();
+
+ // Build lazy object inspectors for the row id: struct<map1:map<string,string>,map2:map<string,string>>
+
+ TypeInfo stringTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
+ LazyStringObjectInspector stringOI = (LazyStringObjectInspector) LazyFactory
+ .createLazyObjectInspector(stringTypeInfo, new byte[] {0}, 0,
+ serDeParams.getNullSequence(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
+
+ LazyMapObjectInspector mapOI = LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(
+ stringOI, stringOI, seps[3], seps[4], serDeParams.getNullSequence(),
+ serDeParams.isEscaped(), serDeParams.getEscapeChar());
+
+ LazySimpleStructObjectInspector rowStructOI = (LazySimpleStructObjectInspector) LazyObjectInspectorFactory
+ .getLazySimpleStructObjectInspector(structColNames,
+ Arrays.<ObjectInspector> asList(mapOI, mapOI), (byte) seps[2],
+ serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(),
+ serDeParams.isEscaped(), serDeParams.getEscapeChar());
+
+ LazySimpleStructObjectInspector structOI = (LazySimpleStructObjectInspector) LazyObjectInspectorFactory
+ .getLazySimpleStructObjectInspector(columns, Arrays.asList(rowStructOI, stringOI), seps[1],
+ serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(),
+ serDeParams.isEscaped(), serDeParams.getEscapeChar());
+
+ AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
+ accumuloSerDeParams.getColumnMappings(), new ColumnVisibility(),
+ accumuloSerDeParams.getRowIdFactory());
+
+ Map<String,String> map1 = new HashMap<String,String>(), map2 = new HashMap<String,String>();
+
+ map1.put("key10", "value10");
+ map1.put("key11", "value11");
+
+ map2.put("key20", "value20");
+ map2.put("key21", "value21");
+
+ ByteArrayRef byteRef = new ByteArrayRef();
+ // Each default separator is one greater than its index (seps[i] == i + 1), so the separator
+ // at offset 1 is (byte) 2. The top-level Hive row uses \x02, the row id struct \x03, and the
+ // two maps \x04 (between entries) and \x05 (between key and value).
+ String accumuloRow = "key10\5value10\4key11\5value11\3key20\5value20\4key21\5value21";
+ LazyStruct entireStruct = (LazyStruct) LazyFactory.createLazyObject(structOI);
+ byteRef.setData((accumuloRow + "\2foo").getBytes());
+ entireStruct.init(byteRef, 0, byteRef.getData().length);
+
+ Mutation m = serializer.serialize(entireStruct, structOI);
+ Assert.assertArrayEquals(accumuloRow.getBytes(), m.getRow());
+ Assert.assertEquals(1, m.getUpdates().size());
+ ColumnUpdate update = m.getUpdates().get(0);
+ Assert.assertEquals("cf", new String(update.getColumnFamily()));
+ Assert.assertEquals("cq", new String(update.getColumnQualifier()));
+ Assert.assertEquals("foo", new String(update.getValue()));
+
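+ // Round-trip: rebuild an AccumuloHiveRow from the serialized Mutation and verify that the
+ // lazily deserialized maps match the original contents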
+ AccumuloHiveRow haRow = new AccumuloHiveRow(new String(m.getRow()));
+ haRow.add("cf", "cq", "foo".getBytes());
+
+ LazyAccumuloRow lazyAccumuloRow = new LazyAccumuloRow(structOI);
+ lazyAccumuloRow.init(haRow, accumuloSerDeParams.getColumnMappings(),
+ accumuloSerDeParams.getRowIdFactory());
+
+ List<Object> objects = lazyAccumuloRow.getFieldsAsList();
+ Assert.assertEquals(2, objects.size());
+
+ Assert.assertEquals("foo", objects.get(1).toString());
+
+ LazyStruct rowStruct = (LazyStruct) objects.get(0);
+ List<Object> rowObjects = rowStruct.getFieldsAsList();
+ Assert.assertEquals(2, rowObjects.size());
+
+ LazyMap rowMap = (LazyMap) rowObjects.get(0);
+ Map<?,?> actualMap = rowMap.getMap();
+ System.out.println("Actual map 1: " + actualMap);
+ Map<String,String> actualStringMap = new HashMap<String,String>();
+ for (Entry<?,?> entry : actualMap.entrySet()) {
+ actualStringMap.put(entry.getKey().toString(), entry.getValue().toString());
+ }
+
+ Assert.assertEquals(map1, actualStringMap);
+
+ rowMap = (LazyMap) rowObjects.get(1);
+ actualMap = rowMap.getMap();
+ System.out.println("Actual map 2: " + actualMap);
+ actualStringMap = new HashMap<String,String>();
+ for (Entry<?,?> entry : actualMap.entrySet()) {
+ actualStringMap.put(entry.getKey().toString(), entry.getValue().toString());
+ }
+
+ Assert.assertEquals(map2, actualStringMap);
+ }
+}
Added: hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDeParameters.java
URL: http://svn.apache.org/viewvc/hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDeParameters.java?rev=1619005&view=auto
==============================================================================
--- hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDeParameters.java (added)
+++ hive/trunk/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDeParameters.java Tue Aug 19 22:41:10 2014
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.accumulo.serde;
+
+import java.util.Properties;
+
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.security.ColumnVisibility;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Tests for {@link AccumuloSerDeParameters}: parsing visibility labels and authorizations from
+ * table properties and the Hadoop configuration.
+ */
+public class TestAccumuloSerDeParameters {
+
+ @Test
+ public void testParseColumnVisibility() throws SerDeException {
+ Properties properties = new Properties();
+ Configuration conf = new Configuration();
+
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:f2,cf:f3");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "field1,field2,field3");
+ properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,string,string");
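+ // "foo&bar" is an Accumulo visibility expression requiring both the foo and bar labels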
+ properties.setProperty(AccumuloSerDeParameters.VISIBILITY_LABEL_KEY, "foo&bar");
+
+ AccumuloSerDeParameters params = new AccumuloSerDeParameters(conf, properties,
+ AccumuloSerDe.class.getName());
+
+ ColumnVisibility cv = params.getTableVisibilityLabel();
+
+ Assert.assertEquals(new ColumnVisibility("foo&bar"), cv);
+ }
+
+ @Test
+ public void testParseAuthorizationsFromConf() throws SerDeException {
+ Configuration conf = new Configuration(false);
+ conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo,bar");
+
+ Authorizations auths = AccumuloSerDeParameters.getAuthorizationsFromConf(conf);
+ Assert.assertEquals(new Authorizations("foo,bar"), auths);
+ }
+
+ @Test
+ public void testParseAuthorizationsFromProperties() throws SerDeException {
+ Configuration conf = new Configuration();
+ Properties properties = new Properties();
+
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:f2,cf:f3");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "field1,field2,field3");
+ properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,string,string");
+ properties.setProperty(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo,bar");
+
+ AccumuloSerDeParameters params = new AccumuloSerDeParameters(conf, properties,
+ AccumuloSerDe.class.getName());
+
+ Authorizations auths = params.getAuthorizations();
+ Assert.assertEquals(new Authorizations("foo,bar"), auths);
+ }
+
+ @Test
+ public void testNullAuthsFromProperties() throws SerDeException {
+ Configuration conf = new Configuration();
+ Properties properties = new Properties();
+
+ properties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:f2,cf:f3");
+ properties.setProperty(serdeConstants.LIST_COLUMNS, "field1,field2,field3");
+ properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,string,string");
+
+ AccumuloSerDeParameters params = new AccumuloSerDeParameters(conf, properties,
+ AccumuloSerDe.class.getName());
+
+ Authorizations auths = params.getAuthorizations();
+ Assert.assertNull(auths);
+ }
+
+ @Test
+ public void testNullAuthsFromConf() throws SerDeException {
+ Configuration conf = new Configuration(false);
+
+ Authorizations auths = AccumuloSerDeParameters.getAuthorizationsFromConf(conf);
+ Assert.assertNull(auths);
+ }
+}