Posted to commits@carbondata.apache.org by ra...@apache.org on 2016/12/09 05:01:47 UTC

[1/2] incubator-carbondata git commit: Added unit test for FilterQueryScannedResult

Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 8275640e0 -> e051d8f0a


Added unit test for FilterQueryScannedResult


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/9a24f238
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/9a24f238
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/9a24f238

Branch: refs/heads/master
Commit: 9a24f2380145b3a3d0d0186d8adce4bc2c2d4e2e
Parents: 8275640
Author: kunal642 <ku...@knoldus.in>
Authored: Wed Nov 9 18:20:14 2016 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Fri Dec 9 10:30:31 2016 +0530

----------------------------------------------------------------------
 .../scan/executor/util/RestructureUtilTest.java | 106 ++++++++
 .../impl/FilterQueryScannedResultTest.java      | 241 +++++++++++++++++++
 .../impl/NonFilterQueryScannedResultTest.java   | 238 ++++++++++++++++++
 3 files changed, 585 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9a24f238/core/src/test/java/org/apache/carbondata/scan/executor/util/RestructureUtilTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/scan/executor/util/RestructureUtilTest.java b/core/src/test/java/org/apache/carbondata/scan/executor/util/RestructureUtilTest.java
new file mode 100644
index 0000000..836586e
--- /dev/null
+++ b/core/src/test/java/org/apache/carbondata/scan/executor/util/RestructureUtilTest.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.scan.executor.util;
+
+import org.apache.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
+import org.apache.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
+import org.apache.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.scan.executor.infos.AggregatorInfo;
+import org.apache.carbondata.scan.model.QueryDimension;
+import org.apache.carbondata.scan.model.QueryMeasure;
+
+import org.junit.Test;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class RestructureUtilTest {
+
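+  // Only queryDimension1 and queryDimension2, whose dimensions come from the block's normal
+  // and complex dimension lists, should be returned; queryDimension3 ("Address") is not part
+  // of the block and is dropped.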
+  @Test public void testToGetUpdatedQueryDimension() {
+    ColumnSchema columnSchema1 = new ColumnSchema();
+    columnSchema1.setColumnName("Id");
+    ColumnSchema columnSchema2 = new ColumnSchema();
+    columnSchema2.setColumnName("Name");
+    ColumnSchema columnSchema3 = new ColumnSchema();
+    columnSchema3.setColumnName("Age");
+    ColumnSchema columnSchema4 = new ColumnSchema();
+    columnSchema4.setColumnName("Salary");
+    ColumnSchema columnSchema5 = new ColumnSchema();
+    columnSchema5.setColumnName("Address");
+
+    CarbonDimension tableBlockDimension1 = new CarbonDimension(columnSchema1, 1, 1, 1, 1);
+    CarbonDimension tableBlockDimension2 = new CarbonDimension(columnSchema2, 5, 5, 5, 5);
+    List<CarbonDimension> tableBlockDimensions =
+        Arrays.asList(tableBlockDimension1, tableBlockDimension2);
+
+    CarbonDimension tableComplexDimension1 = new CarbonDimension(columnSchema3, 4, 4, 4, 4);
+    CarbonDimension tableComplexDimension2 = new CarbonDimension(columnSchema4, 2, 2, 2, 2);
+    List<CarbonDimension> tableComplexDimensions =
+        Arrays.asList(tableComplexDimension1, tableComplexDimension2);
+
+    QueryDimension queryDimension1 = new QueryDimension("Id");
+    queryDimension1.setDimension(tableBlockDimension1);
+    QueryDimension queryDimension2 = new QueryDimension("Name");
+    queryDimension2.setDimension(tableComplexDimension2);
+    QueryDimension queryDimension3 = new QueryDimension("Address");
+    queryDimension3.setDimension(new CarbonDimension(columnSchema5, 3, 3, 3, 3));
+
+    List<QueryDimension> queryDimensions =
+        Arrays.asList(queryDimension1, queryDimension2, queryDimension3);
+
+    List<QueryDimension> result = RestructureUtil
+        .getUpdatedQueryDimension(queryDimensions, tableBlockDimensions, tableComplexDimensions);
+
+    assertThat(result, is(equalTo(Arrays.asList(queryDimension1, queryDimension2))));
+  }
+
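+  // getAggregatorInfos should flag which query measures exist in the current block
+  // (only "Id" and "Name") and carry the default value stored on the missing "Age" measure.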
+  @Test public void testToGetAggregatorInfos() {
+    ColumnSchema columnSchema1 = new ColumnSchema();
+    columnSchema1.setColumnName("Id");
+    ColumnSchema columnSchema2 = new ColumnSchema();
+    columnSchema2.setColumnName("Name");
+    ColumnSchema columnSchema3 = new ColumnSchema();
+    columnSchema3.setColumnName("Age");
+
+    CarbonMeasure carbonMeasure1 = new CarbonMeasure(columnSchema1, 1);
+    CarbonMeasure carbonMeasure2 = new CarbonMeasure(columnSchema2, 2);
+    CarbonMeasure carbonMeasure3 = new CarbonMeasure(columnSchema3, 3);
+    carbonMeasure3.setDefaultValue("3".getBytes());
+    List<CarbonMeasure> currentBlockMeasures = Arrays.asList(carbonMeasure1, carbonMeasure2);
+
+    QueryMeasure queryMeasure1 = new QueryMeasure("Id");
+    queryMeasure1.setMeasure(carbonMeasure1);
+    QueryMeasure queryMeasure2 = new QueryMeasure("Name");
+    queryMeasure2.setMeasure(carbonMeasure2);
+    QueryMeasure queryMeasure3 = new QueryMeasure("Age");
+    queryMeasure3.setMeasure(carbonMeasure3);
+    List<QueryMeasure> queryMeasures = Arrays.asList(queryMeasure1, queryMeasure2, queryMeasure3);
+
+    AggregatorInfo aggregatorInfo =
+        RestructureUtil.getAggregatorInfos(queryMeasures, currentBlockMeasures);
+    boolean[] measuresExist = { true, true, false };
+    assertThat(aggregatorInfo.getMeasureExists(), is(equalTo(measuresExist)));
+    Object[] defaultValues = { null, null, "3".getBytes() };
+    assertThat(aggregatorInfo.getDefaultValues(), is(equalTo(defaultValues)));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9a24f238/core/src/test/java/org/apache/carbondata/scan/result/impl/FilterQueryScannedResultTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/scan/result/impl/FilterQueryScannedResultTest.java b/core/src/test/java/org/apache/carbondata/scan/result/impl/FilterQueryScannedResultTest.java
new file mode 100644
index 0000000..42fe8fb
--- /dev/null
+++ b/core/src/test/java/org/apache/carbondata/scan/result/impl/FilterQueryScannedResultTest.java
@@ -0,0 +1,241 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.scan.result.impl;
+
+import mockit.Mock;
+import mockit.MockUp;
+
+import org.apache.carbondata.core.carbon.datastore.chunk.DimensionChunkAttributes;
+import org.apache.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
+import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
+import org.apache.carbondata.core.carbon.datastore.chunk.impl.ColumnGroupDimensionDataChunk;
+import org.apache.carbondata.core.carbon.metadata.blocklet.datachunk.PresenceMeta;
+import org.apache.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
+import org.apache.carbondata.scan.complextypes.ArrayQueryType;
+import org.apache.carbondata.scan.executor.infos.BlockExecutionInfo;
+import org.apache.carbondata.scan.executor.infos.KeyStructureInfo;
+import org.apache.carbondata.scan.filter.GenericQueryType;
+import org.apache.carbondata.scan.model.QueryDimension;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class FilterQueryScannedResultTest {
+
+  private static FilterQueryScannedResult filterQueryScannedResult;
+
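+  // Shared fixture: a BlockExecutionInfo with a 2-byte fixed-length key, two dictionary and
+  // two no-dictionary block indexes, one complex (array) dimension and a single measure chunk.
+  // setIndexes supplies the filtered row positions this result iterates over.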
+  @BeforeClass public static void setUp() {
+    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
+    blockExecutionInfo.setFixedLengthKeySize(2);
+    blockExecutionInfo.setNoDictionaryBlockIndexes(new int[] { 0, 1 });
+    blockExecutionInfo.setDictionaryColumnBlockIndex(new int[] { 0, 1 });
+    Map<Integer, KeyStructureInfo> columnGroupToKeyInfo = new HashMap<>();
+    columnGroupToKeyInfo.put(1, new KeyStructureInfo());
+    blockExecutionInfo.setColumnGroupToKeyStructureInfo(columnGroupToKeyInfo);
+    Map<Integer, GenericQueryType> genericQueryType = new HashMap<>();
+    genericQueryType.put(1, new ArrayQueryType("Query1", "Parent", 1));
+    blockExecutionInfo.setComplexDimensionInfoMap(genericQueryType);
+    blockExecutionInfo.setComplexColumnParentBlockIndexes(new int[] { 1 });
+    QueryDimension[] queryDimensions = { new QueryDimension("Col1"), new QueryDimension("Col2") };
+    blockExecutionInfo.setQueryDimensions(queryDimensions);
+    filterQueryScannedResult = new FilterQueryScannedResult(blockExecutionInfo);
+    filterQueryScannedResult.setIndexes(new int[] { 1, 2, 3, 4 });
+    DimensionChunkAttributes dimensionChunkAttributes = new DimensionChunkAttributes();
+    dimensionChunkAttributes.setEachRowSize(0);
+    ColumnGroupDimensionDataChunk[] columnGroupDimensionDataChunks =
+        { new ColumnGroupDimensionDataChunk(new byte[] { 1, 2 }, dimensionChunkAttributes),
+            new ColumnGroupDimensionDataChunk(new byte[] { 2, 3 }, dimensionChunkAttributes) };
+    filterQueryScannedResult.setDimensionChunks(columnGroupDimensionDataChunks);
+    MeasureColumnDataChunk measureColumnDataChunk = new MeasureColumnDataChunk();
+    filterQueryScannedResult
+        .setMeasureChunks(new MeasureColumnDataChunk[] { measureColumnDataChunk });
+  }
+
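+  // fillChunkData is mocked to write nothing, so the returned dictionary key is just the
+  // zero-initialised 2-byte fixed-length key: { 0, 0 }.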
+  @Test public void testToGetDictionaryKeyArray() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused") public int fillChunkData(byte[] data, int offset, int rowId,
+          KeyStructureInfo restructuringInfo) {
+        return 1;
+      }
+    };
+    byte[] keyArray = filterQueryScannedResult.getDictionaryKeyArray();
+    byte[] expectedResult = { 0, 0 };
+    assertThat(expectedResult, is(equalTo(keyArray)));
+  }
+
+  @Test public void testToGetDictionaryKeyIntegerArray() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused")
+      public int fillConvertedChunkData(int rowId, int columnIndex, int[] row,
+          KeyStructureInfo info) {
+        return 1;
+      }
+    };
+    int[] keyArray = filterQueryScannedResult.getDictionaryKeyIntegerArray();
+    int[] expectedResult = { 0, 0 };
+    assertThat(expectedResult, is(equalTo(keyArray)));
+  }
+
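+  // Both the complex-type parser and ByteArrayOutputStream.toByteArray() are mocked, so the
+  // single complex column resolves to { 1, 2, 3 } without reading real chunk data.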
+  @Test public void testToGetComplexTypeKeyArray() {
+    new MockUp<ByteArrayOutputStream>() {
+      @Mock @SuppressWarnings("unused") public synchronized byte[] toByteArray() {
+        return new byte[] { 1, 2, 3 };
+      }
+    };
+    new MockUp<ArrayQueryType>() {
+      @Mock @SuppressWarnings("unused") public void parseBlocksAndReturnComplexColumnByteArray(
+          DimensionColumnDataChunk[] dimensionColumnDataChunks, int rowNumber,
+          DataOutputStream dataOutputStream) throws IOException {
+      }
+    };
+    filterQueryScannedResult.incrementCounter();
+    byte[][] keyArray = filterQueryScannedResult.getComplexTypeKeyArray();
+    byte[][] expectedResult = { { 1, 2, 3 } };
+    assertThat(expectedResult, is(equalTo(keyArray)));
+  }
+
+  @Test public void testToGetNoDictionaryKeyArray() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused") public byte[] getChunkData(int rowId) {
+        return new byte[] { 1, 2, 3 };
+      }
+    };
+    byte[][] noDictionaryKeyArray = filterQueryScannedResult.getNoDictionaryKeyArray();
+    byte[][] expectedResult = { { 1, 2, 3 }, { 1, 2, 3 } };
+    assertThat(expectedResult, is(equalTo(noDictionaryKeyArray)));
+  }
+
+  @Test public void testToGetNoDictionaryKeyStringArray() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused") public byte[] getChunkData(int rowId) {
+        return "1".getBytes();
+      }
+    };
+    filterQueryScannedResult.incrementCounter();
+    String[] noDictionaryKeyStringArray = filterQueryScannedResult.getNoDictionaryKeyStringArray();
+    String[] expectedResult = { "1", "1" };
+    assertThat(expectedResult, is(equalTo(noDictionaryKeyStringArray)));
+  }
+
+  @Test public void testToGetCurrenrRowId() {
+    int rowId = filterQueryScannedResult.getCurrenrRowId();
+    int expectedResult = 3;
+    assertThat(expectedResult, is(equalTo(rowId)));
+  }
+
+  @Test public void testToGetDimensionKey() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused") public byte[] getChunkData(int rowId) {
+        return "1".getBytes();
+      }
+    };
+    byte[] dimensionKey = filterQueryScannedResult.getDimensionKey(0);
+    byte[] expectedResult = "1".getBytes();
+    assertThat(expectedResult, is(equalTo(dimensionKey)));
+  }
+
+  @Test public void testToGetIsNullMeasureValue() {
+    new MockUp<MeasureColumnDataChunk>() {
+      @Mock @SuppressWarnings("unused") public PresenceMeta getNullValueIndexHolder() {
+        return new PresenceMeta();
+      }
+    };
+    new MockUp<PresenceMeta>() {
+      @Mock @SuppressWarnings("unused") public BitSet getBitSet() {
+        return new BitSet();
+      }
+    };
+    new MockUp<BitSet>() {
+      @Mock @SuppressWarnings("unused") public boolean get(int bitIndex) {
+        return false;
+      }
+    };
+
+    boolean nullMeasureValue = filterQueryScannedResult.isNullMeasureValue(0);
+    assertThat(false, is(equalTo(nullMeasureValue)));
+  }
+
+  @Test public void testToGetLongMeasureValue() {
+    new MockUp<MeasureColumnDataChunk>() {
+      @Mock @SuppressWarnings("unused") public CarbonReadDataHolder getMeasureDataHolder() {
+        return new CarbonReadDataHolder();
+      }
+    };
+    new MockUp<CarbonReadDataHolder>() {
+      @Mock @SuppressWarnings("unused") public long getReadableLongValueByIndex(int index) {
+        return 2L;
+      }
+    };
+    long longMeasureValue = filterQueryScannedResult.getLongMeasureValue(0);
+    long expectedResult = 2L;
+    assertThat(expectedResult, is(equalTo(longMeasureValue)));
+  }
+
+  @Test public void testToGetDoubleMeasureValue() {
+    new MockUp<MeasureColumnDataChunk>() {
+      @Mock @SuppressWarnings("unused") public CarbonReadDataHolder getMeasureDataHolder() {
+        return new CarbonReadDataHolder();
+      }
+    };
+    new MockUp<CarbonReadDataHolder>() {
+      @Mock @SuppressWarnings("unused") public double getReadableDoubleValueByIndex(int index) {
+        return 2.0;
+      }
+    };
+    double doubleMeasureValue = filterQueryScannedResult.getDoubleMeasureValue(0);
+    double expectedResult = 2.0;
+    assertThat(expectedResult, is(equalTo(doubleMeasureValue)));
+  }
+
+  @Test public void testToGetBigDecimalMeasureValue() {
+    new MockUp<MeasureColumnDataChunk>() {
+      @Mock @SuppressWarnings("unused") public CarbonReadDataHolder getMeasureDataHolder() {
+        return new CarbonReadDataHolder();
+      }
+    };
+    new MockUp<CarbonReadDataHolder>() {
+      @Mock @SuppressWarnings("unused")
+      public BigDecimal getReadableBigDecimalValueByIndex(int index) {
+        return new BigDecimal(2);
+      }
+    };
+    BigDecimal bigDecimalMeasureValue = filterQueryScannedResult.getBigDecimalMeasureValue(0);
+    BigDecimal expectedResult = new BigDecimal(2);
+    assertThat(expectedResult, is(equalTo(bigDecimalMeasureValue)));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9a24f238/core/src/test/java/org/apache/carbondata/scan/result/impl/NonFilterQueryScannedResultTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/scan/result/impl/NonFilterQueryScannedResultTest.java b/core/src/test/java/org/apache/carbondata/scan/result/impl/NonFilterQueryScannedResultTest.java
new file mode 100644
index 0000000..7e5d085
--- /dev/null
+++ b/core/src/test/java/org/apache/carbondata/scan/result/impl/NonFilterQueryScannedResultTest.java
@@ -0,0 +1,238 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.scan.result.impl;
+
+import mockit.Mock;
+import mockit.MockUp;
+
+import org.apache.carbondata.core.carbon.datastore.chunk.DimensionChunkAttributes;
+import org.apache.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
+import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
+import org.apache.carbondata.core.carbon.datastore.chunk.impl.ColumnGroupDimensionDataChunk;
+import org.apache.carbondata.core.carbon.metadata.blocklet.datachunk.PresenceMeta;
+import org.apache.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
+import org.apache.carbondata.scan.complextypes.ArrayQueryType;
+import org.apache.carbondata.scan.executor.infos.BlockExecutionInfo;
+import org.apache.carbondata.scan.executor.infos.KeyStructureInfo;
+import org.apache.carbondata.scan.filter.GenericQueryType;
+import org.apache.carbondata.scan.model.QueryDimension;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class NonFilterQueryScannedResultTest {
+
+  private static NonFilterQueryScannedResult filterQueryScannedResult;
+
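+  // Same fixture as FilterQueryScannedResultTest, but for the non-filter result, which walks
+  // rows sequentially rather than through an explicit set of filtered row indexes.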
+  @BeforeClass public static void setUp() {
+    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
+    blockExecutionInfo.setFixedLengthKeySize(2);
+    blockExecutionInfo.setNoDictionaryBlockIndexes(new int[] { 0, 1 });
+    blockExecutionInfo.setDictionaryColumnBlockIndex(new int[] { 0, 1 });
+    Map<Integer, KeyStructureInfo> columnGroupToKeyInfo = new HashMap<>();
+    columnGroupToKeyInfo.put(1, new KeyStructureInfo());
+    blockExecutionInfo.setColumnGroupToKeyStructureInfo(columnGroupToKeyInfo);
+    Map<Integer, GenericQueryType> genericQueryType = new HashMap<>();
+    genericQueryType.put(1, new ArrayQueryType("Query1", "Parent", 1));
+    blockExecutionInfo.setComplexDimensionInfoMap(genericQueryType);
+    blockExecutionInfo.setComplexColumnParentBlockIndexes(new int[] { 1 });
+    QueryDimension[] queryDimensions = { new QueryDimension("Col1"), new QueryDimension("Col2") };
+    blockExecutionInfo.setQueryDimensions(queryDimensions);
+    filterQueryScannedResult = new NonFilterQueryScannedResult(blockExecutionInfo);
+    DimensionChunkAttributes dimensionChunkAttributes = new DimensionChunkAttributes();
+    dimensionChunkAttributes.setEachRowSize(0);
+    ColumnGroupDimensionDataChunk[] columnGroupDimensionDataChunks =
+        { new ColumnGroupDimensionDataChunk(new byte[] { 1, 2 }, dimensionChunkAttributes),
+            new ColumnGroupDimensionDataChunk(new byte[] { 2, 3 }, dimensionChunkAttributes) };
+    filterQueryScannedResult.setDimensionChunks(columnGroupDimensionDataChunks);
+    MeasureColumnDataChunk measureColumnDataChunk = new MeasureColumnDataChunk();
+    filterQueryScannedResult
+        .setMeasureChunks(new MeasureColumnDataChunk[] { measureColumnDataChunk });
+  }
+
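+  // As in the filter variant, fillChunkData is mocked to write nothing, so the
+  // 2-byte fixed-length key stays zeroed.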
+  @Test public void testToGetDictionaryKeyArray() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused") public int fillChunkData(byte[] data, int offset, int rowId,
+          KeyStructureInfo restructuringInfo) {
+        return 1;
+      }
+    };
+    byte[] keyArray = filterQueryScannedResult.getDictionaryKeyArray();
+    byte[] expectedResult = { 0, 0 };
+    assertThat(expectedResult, is(equalTo(keyArray)));
+  }
+
+  @Test public void testToGetDictionaryKeyIntegerArray() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused")
+      public int fillConvertedChunkData(int rowId, int columnIndex, int[] row,
+          KeyStructureInfo info) {
+        return 1;
+      }
+    };
+    int[] keyArray = filterQueryScannedResult.getDictionaryKeyIntegerArray();
+    int[] expectedResult = { 0, 0 };
+    assertThat(expectedResult, is(equalTo(keyArray)));
+  }
+
+  @Test public void testToGetComplexTypeKeyArray() {
+    new MockUp<ByteArrayOutputStream>() {
+      @Mock @SuppressWarnings("unused") public synchronized byte[] toByteArray() {
+        return new byte[] { 1, 2, 3 };
+      }
+    };
+    new MockUp<ArrayQueryType>() {
+      @Mock @SuppressWarnings("unused") public void parseBlocksAndReturnComplexColumnByteArray(
+          DimensionColumnDataChunk[] dimensionColumnDataChunks, int rowNumber,
+          DataOutputStream dataOutputStream) throws IOException {
+      }
+    };
+    filterQueryScannedResult.incrementCounter();
+    byte[][] keyArray = filterQueryScannedResult.getComplexTypeKeyArray();
+    byte[][] expectedResult = { { 1, 2, 3 } };
+    assertThat(expectedResult, is(equalTo(keyArray)));
+  }
+
+  @Test public void testToGetNoDictionaryKeyArray() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused") public byte[] getChunkData(int rowId) {
+        return new byte[] { 1, 2, 3 };
+      }
+    };
+    byte[][] noDictionaryKeyArray = filterQueryScannedResult.getNoDictionaryKeyArray();
+    byte[][] expectedResult = { { 1, 2, 3 }, { 1, 2, 3 } };
+    assertThat(expectedResult, is(equalTo(noDictionaryKeyArray)));
+  }
+
+  @Test public void testToGetNoDictionaryKeyStringArray() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused") public byte[] getChunkData(int rowId) {
+        return "1".getBytes();
+      }
+    };
+    filterQueryScannedResult.incrementCounter();
+    String[] noDictionaryKeyStringArray = filterQueryScannedResult.getNoDictionaryKeyStringArray();
+    String[] expectedResult = { "1", "1" };
+    assertThat(expectedResult, is(equalTo(noDictionaryKeyStringArray)));
+  }
+
+  @Test public void testToGetCurrenrRowId() {
+    int rowId = filterQueryScannedResult.getCurrenrRowId();
+    int expectedResult = 2;
+    assertThat(expectedResult, is(equalTo(rowId)));
+  }
+
+  @Test public void testToGetDimensionKey() {
+    new MockUp<ColumnGroupDimensionDataChunk>() {
+      @Mock @SuppressWarnings("unused") public byte[] getChunkData(int rowId) {
+        return "1".getBytes();
+      }
+    };
+    byte[] dimensionKey = filterQueryScannedResult.getDimensionKey(0);
+    byte[] expectedResult = "1".getBytes();
+    assertThat(expectedResult, is(equalTo(dimensionKey)));
+  }
+
+  @Test public void testToGetIsNullMeasureValue() {
+    new MockUp<MeasureColumnDataChunk>() {
+      @Mock @SuppressWarnings("unused") public PresenceMeta getNullValueIndexHolder() {
+        return new PresenceMeta();
+      }
+    };
+    new MockUp<PresenceMeta>() {
+      @Mock @SuppressWarnings("unused") public BitSet getBitSet() {
+        return new BitSet();
+      }
+    };
+    new MockUp<BitSet>() {
+      @Mock @SuppressWarnings("unused") public boolean get(int bitIndex) {
+        return false;
+      }
+    };
+
+    boolean nullMeasureValue = filterQueryScannedResult.isNullMeasureValue(0);
+    assertThat(false, is(equalTo(nullMeasureValue)));
+  }
+
+  @Test public void testToGetLongMeasureValue() {
+    new MockUp<MeasureColumnDataChunk>() {
+      @Mock @SuppressWarnings("unused") public CarbonReadDataHolder getMeasureDataHolder() {
+        return new CarbonReadDataHolder();
+      }
+    };
+    new MockUp<CarbonReadDataHolder>() {
+      @Mock @SuppressWarnings("unused") public long getReadableLongValueByIndex(int index) {
+        return 2L;
+      }
+    };
+    long longMeasureValue = filterQueryScannedResult.getLongMeasureValue(0);
+    long expectedResult = 2L;
+    assertThat(expectedResult, is(equalTo(longMeasureValue)));
+  }
+
+  @Test public void testToGetDoubleMeasureValue() {
+    new MockUp<MeasureColumnDataChunk>() {
+      @Mock @SuppressWarnings("unused") public CarbonReadDataHolder getMeasureDataHolder() {
+        return new CarbonReadDataHolder();
+      }
+    };
+    new MockUp<CarbonReadDataHolder>() {
+      @Mock @SuppressWarnings("unused") public double getReadableDoubleValueByIndex(int index) {
+        return 2.0;
+      }
+    };
+    double doubleMeasureValue = filterQueryScannedResult.getDoubleMeasureValue(0);
+    double expectedResult = 2.0;
+    assertThat(expectedResult, is(equalTo(doubleMeasureValue)));
+  }
+
+  @Test public void testToGetBigDecimalMeasureValue() {
+    new MockUp<MeasureColumnDataChunk>() {
+      @Mock @SuppressWarnings("unused") public CarbonReadDataHolder getMeasureDataHolder() {
+        return new CarbonReadDataHolder();
+      }
+    };
+    new MockUp<CarbonReadDataHolder>() {
+      @Mock @SuppressWarnings("unused")
+      public BigDecimal getReadableBigDecimalValueByIndex(int index) {
+        return new BigDecimal(2);
+      }
+    };
+    BigDecimal bigDecimalMeasureValue = filterQueryScannedResult.getBigDecimalMeasureValue(0);
+    BigDecimal expectedResult = new BigDecimal(2);
+    assertThat(expectedResult, is(equalTo(bigDecimalMeasureValue)));
+  }
+
+}


[2/2] incubator-carbondata git commit: [CARBONDATA-416] Unit test for result.impl package This closes #322

Posted by ra...@apache.org.
[CARBONDATA-416] Unit test for result.impl package This closes #322


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/e051d8f0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/e051d8f0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/e051d8f0

Branch: refs/heads/master
Commit: e051d8f0a6e6cbb87c43f32dd55f49d8052df9f0
Parents: 8275640 9a24f23
Author: ravipesala <ra...@gmail.com>
Authored: Fri Dec 9 10:31:21 2016 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Fri Dec 9 10:31:21 2016 +0530

----------------------------------------------------------------------
 .../scan/executor/util/RestructureUtilTest.java | 106 ++++++++
 .../impl/FilterQueryScannedResultTest.java      | 241 +++++++++++++++++++
 .../impl/NonFilterQueryScannedResultTest.java   | 238 ++++++++++++++++++
 3 files changed, 585 insertions(+)
----------------------------------------------------------------------