You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ch...@apache.org on 2016/08/15 07:09:06 UTC
[21/52] [partial] incubator-carbondata git commit: Renamed packages
to org.apache.carbondata and fixed errors
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
deleted file mode 100644
index a92539e..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.chunk.reader.measure;
-
-import java.util.List;
-
-import org.carbondata.core.carbon.datastore.chunk.reader.MeasureColumnChunkReader;
-import org.carbondata.core.carbon.metadata.blocklet.datachunk.DataChunk;
-import org.carbondata.core.datastorage.store.compression.ValueCompressionModel;
-import org.carbondata.core.datastorage.store.compression.ValueCompressonHolder;
-import org.carbondata.core.datastorage.store.compression.ValueCompressonHolder.UnCompressValue;
-
-/**
- * Measure block reader abstract class
- */
-public abstract class AbstractMeasureChunkReader implements MeasureColumnChunkReader {
-
- /**
- * metadata which is used to compress and uncompress the measure value
- */
- protected ValueCompressionModel compressionModel;
-
- /**
- * file path from which blocks will be read
- */
- protected String filePath;
-
- /**
- * measure chunk have the information about the metadata present in the file
- */
- protected List<DataChunk> measureColumnChunk;
-
- /**
- * uncompressed value holder, one per measure column, created from the value compression model
- */
- protected UnCompressValue[] values;
-
- /**
- * Constructor to get minimum parameter to create instance of this class
- *
- * @param measureColumnChunk measure chunk metadata
- * @param compressionModel metadata which is used to compress and uncompress
- * the measure value
- * @param filePath file from which data will be read
- * @param isInMemory in case of in memory it will read and holds the data and when
- * query request will come it will uncompress the data
- */
- public AbstractMeasureChunkReader(List<DataChunk> measureColumnChunk,
- ValueCompressionModel compressionModel, String filePath, boolean isInMemory) {
- this.measureColumnChunk = measureColumnChunk;
- this.compressionModel = compressionModel;
- this.filePath = filePath;
- values =
- new ValueCompressonHolder.UnCompressValue[compressionModel.getUnCompressValues().length];
- for (int i = 0; i < values.length; i++) {
- values[i] = compressionModel.getUnCompressValues()[i].getNew().getCompressorObject();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/reader/measure/CompressedMeasureChunkFileBasedReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/reader/measure/CompressedMeasureChunkFileBasedReader.java b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/reader/measure/CompressedMeasureChunkFileBasedReader.java
deleted file mode 100644
index d748cd9..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/reader/measure/CompressedMeasureChunkFileBasedReader.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.chunk.reader.measure;
-
-import java.util.List;
-
-import org.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.carbondata.core.carbon.metadata.blocklet.datachunk.DataChunk;
-import org.carbondata.core.datastorage.store.FileHolder;
-import org.carbondata.core.datastorage.store.compression.ValueCompressionModel;
-import org.carbondata.core.datastorage.store.compression.ValueCompressonHolder;
-import org.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
-
-/**
- * Compressed measure chunk reader
- */
-public class CompressedMeasureChunkFileBasedReader extends AbstractMeasureChunkReader {
-
- /**
- * Constructor to get minimum parameter to create instance of this class
- *
- * @param measureColumnChunk measure chunk metadata
- * @param compressionModel metadata which is used to compress and uncompress
- * the measure value
- * @param filePath file from which data will be read
- */
- public CompressedMeasureChunkFileBasedReader(List<DataChunk> measureColumnChunk,
- ValueCompressionModel compressionModel, String filePath) {
- super(measureColumnChunk, compressionModel, filePath, false);
- }
-
- /**
- * Method to read the blocks data based on block indexes
- *
- * @param fileReader file reader to read the blocks
- * @param blockIndexes blocks to be read
- * @return measure data chunks
- */
- @Override public MeasureColumnDataChunk[] readMeasureChunks(FileHolder fileReader,
- int... blockIndexes) {
- MeasureColumnDataChunk[] datChunk = new MeasureColumnDataChunk[values.length];
- for (int i = 0; i < blockIndexes.length; i++) {
- datChunk[blockIndexes[i]] = readMeasureChunk(fileReader, blockIndexes[i]);
- }
- return datChunk;
- }
-
- /**
- * Method to read the blocks data based on block index
- *
- * @param fileReader file reader to read the blocks
- * @param blockIndex block to be read
- * @return measure data chunk
- */
- @Override public MeasureColumnDataChunk readMeasureChunk(FileHolder fileReader, int blockIndex) {
- MeasureColumnDataChunk datChunk = new MeasureColumnDataChunk();
- // create a new uncompressor
- ValueCompressonHolder.UnCompressValue copy = values[blockIndex].getNew();
- // read data from file and set to uncompressor
- copy.setValue(fileReader
- .readByteArray(filePath, measureColumnChunk.get(blockIndex).getDataPageOffset(),
- measureColumnChunk.get(blockIndex).getDataPageLength()));
- // get the data holder after uncompressing
- CarbonReadDataHolder measureDataHolder =
- copy.uncompress(compressionModel.getChangedDataType()[blockIndex])
- .getValues(compressionModel.getDecimal()[blockIndex],
- compressionModel.getMaxValue()[blockIndex]);
- // set the data chunk
- datChunk.setMeasureDataHolder(measureDataHolder);
- // set the null value indexes
- datChunk
- .setNullValueIndexHolder(measureColumnChunk.get(blockIndex).getNullValueIndexForColumn());
- return datChunk;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/exception/IndexBuilderException.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/exception/IndexBuilderException.java b/core/src/main/java/org/carbondata/core/carbon/datastore/exception/IndexBuilderException.java
deleted file mode 100644
index 5ca33e3..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/exception/IndexBuilderException.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.core.carbon.datastore.exception;
-
-import java.util.Locale;
-
-/**
- * Exception class for block builder
- *
- *
- */
-public class IndexBuilderException extends Exception {
- /**
- * default serial version ID.
- */
- private static final long serialVersionUID = 1L;
-
- /**
- * The Error message.
- */
- private String msg = "";
-
- /**
- * Constructor
- *
- *
- * @param msg The error message for this exception.
- */
- public IndexBuilderException(String msg) {
- super(msg);
- this.msg = msg;
- }
-
- /**
- * Constructor
- *
- * @param msg exception message
- * @param throwable detail exception
- */
- public IndexBuilderException(String msg, Throwable throwable) {
- super(msg, throwable);
- this.msg = msg;
- }
-
- /**
- * Constructor
- *
- * @param throwable exception
- */
- public IndexBuilderException(Throwable throwable) {
- super(throwable);
- }
-
- /**
- * This method is used to get the localized message.
- *
- * @param locale - A Locale object represents a specific geographical,
- * political, or cultural region.
- * @return - Localized error message (always empty in this implementation).
- */
- public String getLocalizedMessage(Locale locale) {
- return "";
- }
-
- /**
- * getLocalizedMessage
- */
- @Override public String getLocalizedMessage() {
- return super.getLocalizedMessage();
- }
-
- /**
- * getMessage
- */
- public String getMessage() {
- return this.msg;
- }
-}
-
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/AbstractBTreeBuilder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/AbstractBTreeBuilder.java b/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/AbstractBTreeBuilder.java
deleted file mode 100644
index 88f03cc..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/AbstractBTreeBuilder.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.impl.btree;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.carbondata.core.carbon.datastore.BtreeBuilder;
-import org.carbondata.core.carbon.datastore.IndexKey;
-import org.carbondata.core.constants.CarbonCommonConstants;
-import org.carbondata.core.util.CarbonProperties;
-
-/**
- * Abstract Btree based builder
- */
-public abstract class AbstractBTreeBuilder implements BtreeBuilder {
-
- /**
- * default Number of keys per page
- */
- private static final int DEFAULT_NUMBER_OF_ENTRIES_NONLEAF = 32;
-
- /**
- * Maximum number of entries in intermediate nodes
- */
- protected int maxNumberOfEntriesInNonLeafNodes;
-
- /**
- * Number of leaf nodes
- */
- protected int nLeaf;
-
- /**
- * root node of a btree
- */
- protected BTreeNode root;
-
- public AbstractBTreeBuilder() {
- maxNumberOfEntriesInNonLeafNodes = Integer.parseInt(CarbonProperties.getInstance()
- .getProperty("com.huawei.datastore.internalnodesize",
- DEFAULT_NUMBER_OF_ENTRIES_NONLEAF + ""));
- }
-
- /**
- * Below method is to build the intermediate node of the btree
- *
- * @param curNode current node
- * @param childNodeGroups children group which will have all the children for
- * particular intermediate node
- * @param currentGroup current group
- * @param interNSKeyList list of keys
- * @param numberOfInternalNode number of internal node
- */
- protected void addIntermediateNode(BTreeNode curNode, List<BTreeNode[]> childNodeGroups,
- BTreeNode[] currentGroup, List<List<IndexKey>> interNSKeyList, int numberOfInternalNode) {
-
- int groupCounter;
- // Build internal nodes level by level. Each upper node can have
- // upperMaxEntry keys and upperMaxEntry+1 children
- int remainder;
- int nHigh = numberOfInternalNode;
- boolean bRootBuilt = false;
- remainder = nLeaf % (maxNumberOfEntriesInNonLeafNodes);
- List<IndexKey> interNSKeys = null;
- while (nHigh > 1 || !bRootBuilt) {
- List<BTreeNode[]> internalNodeGroups =
- new ArrayList<BTreeNode[]>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
- List<List<IndexKey>> interNSKeyTmpList =
- new ArrayList<List<IndexKey>>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
- numberOfInternalNode = 0;
- for (int i = 0; i < nHigh; i++) {
- // Create a new internal node
- curNode = new BTreeNonLeafNode();
- // Allocate a new node group if current node group is full
- groupCounter = i % (maxNumberOfEntriesInNonLeafNodes);
- if (groupCounter == 0) {
- // Create new node group
- currentGroup = new BTreeNonLeafNode[maxNumberOfEntriesInNonLeafNodes];
- internalNodeGroups.add(currentGroup);
- numberOfInternalNode++;
- interNSKeys = new ArrayList<IndexKey>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
- interNSKeyTmpList.add(interNSKeys);
- }
-
- // Add the new internal node to current group
- if (null != currentGroup) {
- currentGroup[groupCounter] = curNode;
- }
- int nNodes;
-
- if (i == nHigh - 1 && remainder != 0) {
- nNodes = remainder;
- } else {
- nNodes = maxNumberOfEntriesInNonLeafNodes;
- }
- // Point the internal node to its children node group
- curNode.setChildren(childNodeGroups.get(i));
- // Fill the internal node with keys based on its child nodes
- for (int j = 0; j < nNodes; j++) {
- curNode.setKey(interNSKeyList.get(i).get(j));
- if (j == 0 && null != interNSKeys) {
- interNSKeys.add(interNSKeyList.get(i).get(j));
-
- }
- }
- }
- // If nHigh is 1, we have the root node
- if (nHigh == 1) {
- bRootBuilt = true;
- }
-
- remainder = nHigh % (maxNumberOfEntriesInNonLeafNodes);
- nHigh = numberOfInternalNode;
- childNodeGroups = internalNodeGroups;
- interNSKeyList = interNSKeyTmpList;
- }
- root = curNode;
- }
-
- /**
- * Below method is to convert the start key
- * into fixed and variable length key.
- * data format<length><fixed length key><length><variable length key>
- *
- * @param startKey
- * @return Index key
- */
- protected IndexKey convertStartKeyToNodeEntry(byte[] startKey) {
- ByteBuffer buffer = ByteBuffer.wrap(startKey);
- buffer.rewind();
- int dictonaryKeySize = buffer.getInt();
- int nonDictonaryKeySize = buffer.getInt();
- byte[] dictionaryKey = new byte[dictonaryKeySize];
- buffer.get(dictionaryKey);
- byte[] nonDictionaryKey = new byte[nonDictonaryKeySize];
- buffer.get(nonDictionaryKey);
- IndexKey entry = new IndexKey(dictionaryKey, nonDictionaryKey);
- return entry;
- }
-
- /**
- * Below method will be used to get the first data block
- * in Btree case it will be root node
- */
- @Override public BTreeNode get() {
- return root;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/AbstractBTreeLeafNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/AbstractBTreeLeafNode.java b/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/AbstractBTreeLeafNode.java
deleted file mode 100644
index b038afe..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/AbstractBTreeLeafNode.java
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.impl.btree;
-
-import org.carbondata.core.carbon.datastore.DataRefNode;
-import org.carbondata.core.carbon.datastore.IndexKey;
-import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.carbondata.core.datastorage.store.FileHolder;
-
-/**
- * Leaf node abstract class
- */
-public abstract class AbstractBTreeLeafNode implements BTreeNode {
-
- /**
- * number of keys in a btree
- */
- protected int numberOfKeys;
-
- /**
- * node number
- */
- protected long nodeNumber;
-
- /**
- * Next node of the leaf
- */
- protected BTreeNode nextNode;
-
- /**
- * max key of the column this will be used to check whether this leaf will
- * be used for scanning or not
- */
- protected byte[][] maxKeyOfColumns;
-
- /**
- * min key of the column this will be used to check whether this leaf will
- * be used for scanning or not
- */
- protected byte[][] minKeyOfColumns;
-
- /**
- * Below method will return the number of keys present in this node
- *
- *
- * @return number of keys
- */
- @Override public int nodeSize() {
- return this.numberOfKeys;
- }
-
- /**
- * below method will be used to set the next node
- *
- * @param nextNode
- */
- @Override public void setNextNode(BTreeNode nextNode) {
- this.nextNode = nextNode;
- }
-
- /**
- * Below method is to get the children based on index
- *
- * @param index children index
- * @return btree node
- */
- @Override public BTreeNode getChild(int index) {
- throw new UnsupportedOperationException("Operation not supported in case of leaf node");
- }
-
- /**
- * below method to set the node entry
- *
- * @param key node entry
- */
- @Override public void setKey(IndexKey key) {
- throw new UnsupportedOperationException("Operation not supported in case of leaf node");
- }
-
- /**
- * Method can be used to get the block index .This can be used when multiple
- * thread can be used scan group of blocks in that can we can assign the
- * some of the blocks to one thread and some to other
- *
- * @return block number
- */
- @Override public long nodeNumber() {
- return nodeNumber;
- }
-
- /**
- * This method will be used to get the max value of all the columns this can
- * be used in case of filter query
- *
- * @return max value of all the columns
- */
- @Override public byte[][] getColumnsMaxValue() {
- return maxKeyOfColumns;
- }
-
- /**
- * This method will be used to get the min value of all the columns this can
- * be used in case of filter query
- *
- * @return min value of all the columns
- */
- @Override public byte[][] getColumnsMinValue() {
- return minKeyOfColumns;
- }
-
- /**
- * to check whether node in a btree is a leaf node or not
- *
- * @return leaf node or not
- */
- @Override public boolean isLeafNode() {
- return true;
- }
-
- /**
- * Method to get the next block this can be used while scanning when
- * iterator of this class can be used to iterate over blocks
- *
- * @return next block
- */
- @Override public DataRefNode getNextDataRefNode() {
- return nextNode;
- }
-
- /**
- * below method will return the one node indexes
- *
- * @return node entry array
- */
- @Override public IndexKey[] getNodeKeys() {
- // as this is a leaf node so this method implementation is not required
- throw new UnsupportedOperationException("Operation not supported in case of leaf node");
- }
-
- /**
- * below method will be used to set the children of intermediate node
- *
- * @param children array
- */
- @Override public void setChildren(BTreeNode[] children) {
- // not required in case of leaf node as leaf node will not have any children
- throw new UnsupportedOperationException("Operation not supported in case of leaf node");
- }
-
- /**
- * Below method will be used to get the dimension chunks
- *
- * @param fileReader file reader to read the chunks from file
- * @param blockIndexes indexes of the blocks need to be read
- * @return dimension data chunks
- */
- @Override public DimensionColumnDataChunk[] getDimensionChunks(FileHolder fileReader,
- int[] blockIndexes) {
- // Not required here as the leaf which will use this class will implement its own get
- // dimension chunks
- return null;
- }
-
- /**
- * Below method will be used to get the dimension chunk
- *
- * @param fileReader file reader to read the chunk from file
- * @param blockIndex block index to be read
- * @return dimension data chunk
- */
- @Override public DimensionColumnDataChunk getDimensionChunk(FileHolder fileReader,
- int blockIndex) {
- // Not required here as the leaf which will use this class will implement
- // its own get dimension chunks
- return null;
- }
-
- /**
- * Below method will be used to get the measure chunk
- *
- * @param fileReader file reader to read the chunk from file
- * @param blockIndexes block indexes to be read from file
- * @return measure column data chunk
- */
- @Override public MeasureColumnDataChunk[] getMeasureChunks(FileHolder fileReader,
- int[] blockIndexes) {
- // Not required here as the leaf which will use this class will implement its own get
- // measure chunks
- return null;
- }
-
- /**
- * Below method will be used to read the measure chunk
- *
- * @param fileReader file read to read the file chunk
- * @param blockIndex block index to be read from file
- * @return measure data chunk
- */
- @Override public MeasureColumnDataChunk getMeasureChunk(FileHolder fileReader, int blockIndex) {
- // Not required here as the leaf which will use this class will implement its own get
- // measure chunks
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeDataRefNodeFinder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeDataRefNodeFinder.java b/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeDataRefNodeFinder.java
deleted file mode 100644
index e60dad7..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeDataRefNodeFinder.java
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.impl.btree;
-
-import java.nio.ByteBuffer;
-
-import org.carbondata.core.carbon.datastore.DataRefNode;
-import org.carbondata.core.carbon.datastore.DataRefNodeFinder;
-import org.carbondata.core.carbon.datastore.IndexKey;
-import org.carbondata.core.util.ByteUtil;
-
-/**
- * Below class will be used to find a block in a btree
- */
-public class BTreeDataRefNodeFinder implements DataRefNodeFinder {
-
- /**
- * no dictionary column value is of variable length so in each column value
- * it will be -1
- */
- private static final int NO_DCITIONARY_COLUMN_VALUE = -1;
-
- /**
- * size of the short value in bytes
- */
- private static final short SHORT_SIZE_IN_BYTES = 2;
- /**
- * this will hold the information about the size of each value of a column,
- * this will be used during Comparison of the btree node value and the
- * search value if value is more than zero then its a fixed length column
- * else its variable length column. So as data of both type of column store
- * separately so this value size array will be used for both purpose
- * comparison and jumping(which type value we need to compare)
- */
- private int[] eachColumnValueSize;
-
- /**
- * this will be used during search for no dictionary column
- */
- private int numberOfNoDictionaryColumns;
-
- public BTreeDataRefNodeFinder(int[] eachColumnValueSize) {
- this.eachColumnValueSize = eachColumnValueSize;
-
- for (int i = 0; i < eachColumnValueSize.length; i++) {
- if (eachColumnValueSize[i] == -1) {
- numberOfNoDictionaryColumns++;
- }
- }
- }
-
- /**
- * Below method will be used to get the first tentative data block based on
- * search key
- *
- * @param dataRefBlock complete data blocks present
- * @param searchKey key to be searched
- * @return data block
- */
- @Override public DataRefNode findFirstDataBlock(DataRefNode dataRefBlock, IndexKey searchKey) {
- // as its for btree type cast it to btree interface
- BTreeNode rootNode = (BTreeNode) dataRefBlock;
- while (!rootNode.isLeafNode()) {
- rootNode = findFirstLeafNode(searchKey, rootNode);
- }
- return rootNode;
- }
-
- /**
- * Below method will be used to get the last data tentative block based on
- * search key
- *
- * @param dataRefBlock complete data blocks present
- * @param searchKey key to be searched
- * @return data block
- */
- @Override public DataRefNode findLastDataBlock(DataRefNode dataRefBlock, IndexKey searchKey) {
- // as its for btree type cast it to btree interface
- BTreeNode rootNode = (BTreeNode) dataRefBlock;
- while (!rootNode.isLeafNode()) {
- rootNode = findLastLeafNode(searchKey, rootNode);
- }
- return rootNode;
- }
-
- /**
- * Binary search used to get the first tentative block of the btree based on
- * search key
- *
- * @param key search key
- * @param node root node of btree
- * @return first tentative block
- */
- private BTreeNode findFirstLeafNode(IndexKey key, BTreeNode node) {
- int childNodeIndex;
- int low = 0;
- int high = node.nodeSize() - 1;
- int mid = 0;
- int compareRes = -1;
- IndexKey[] nodeKeys = node.getNodeKeys();
- //
- while (low <= high) {
- mid = (low + high) >>> 1;
- // compare the entries
- compareRes = compareIndexes(key, nodeKeys[mid]);
- if (compareRes < 0) {
- high = mid - 1;
- } else if (compareRes > 0) {
- low = mid + 1;
- } else {
- // if key is matched then get the first entry
- int currentPos = mid;
- while (currentPos - 1 >= 0 && compareIndexes(key, nodeKeys[currentPos - 1]) == 0) {
- currentPos--;
- }
- mid = currentPos;
- break;
- }
- }
- // if compare result is less than zero then we
- // and mid is more than 0 then we need the previous block as duplicates
- // record can be present
- if (compareRes < 0) {
- if (mid > 0) {
- mid--;
- }
- childNodeIndex = mid;
- } else {
- childNodeIndex = mid;
- }
- // get the leaf child
- node = node.getChild(childNodeIndex);
- return node;
- }
-
- /**
- * Binary search used to get the last tentative block of the btree based on
- * search key
- *
- * @param key search key
- * @param node root node of btree
- * @return last tentative block
- */
- private BTreeNode findLastLeafNode(IndexKey key, BTreeNode node) {
- int childNodeIndex;
- int low = 0;
- int high = node.nodeSize() - 1;
- int mid = 0;
- int compareRes = -1;
- IndexKey[] nodeKeys = node.getNodeKeys();
- //
- while (low <= high) {
- mid = (low + high) >>> 1;
- // compare the entries
- compareRes = compareIndexes(key, nodeKeys[mid]);
- if (compareRes < 0) {
- high = mid - 1;
- } else if (compareRes > 0) {
- low = mid + 1;
- } else {
- int currentPos = mid;
- // if key is matched then get the last entry
- while (currentPos + 1 < node.nodeSize()
- && compareIndexes(key, nodeKeys[currentPos + 1]) == 0) {
- currentPos++;
- }
- mid = currentPos;
- break;
- }
- }
- // if compare result is less than zero then we
- // and mid is more than 0 then we need the previous block as duplicates
- // record can be present
- if (compareRes < 0) {
- if (mid > 0) {
- mid--;
- }
- childNodeIndex = mid;
- } else {
- childNodeIndex = mid;
- }
- node = node.getChild(childNodeIndex);
- return node;
- }
-
- /**
- * Comparison of index key will be following format of key <Dictionary> key
- * will be in byte array No dictionary key Index of FirstKey (2
- * bytes)><Index of SecondKey (2 bytes)><Index of NKey (2 bytes)> <First Key
- * ByteArray><2nd Key ByteArray><N Key ByteArray> in each column value size
- * of no dictionary column will be -1 if in each column value is not -1 then
- * compare the byte array based on size and increment the offset to
- * dictionary column size if size is -1 then its a no dictionary key so to
- * get the length subtract the size of current with next key offset it will
- * give the actual length if it is at last position or only one key is
- * present then subtract with length
- *
- * @param first first key
- * @param second second key
- * @return comparison value
- */
- private int compareIndexes(IndexKey first, IndexKey second) {
- int dictionaryKeyOffset = 0;
- int nonDictionaryKeyOffset = 0;
- int compareResult = 0;
- int processedNoDictionaryColumn = numberOfNoDictionaryColumns;
- ByteBuffer firstNoDictionaryKeyBuffer = ByteBuffer.wrap(first.getNoDictionaryKeys());
- ByteBuffer secondNoDictionaryKeyBuffer = ByteBuffer.wrap(second.getNoDictionaryKeys());
- int actualOffset = 0;
- int firstNoDcitionaryLength = 0;
- int secondNodeDictionaryLength = 0;
-
- for (int i = 0; i < eachColumnValueSize.length; i++) {
-
- if (eachColumnValueSize[i] != NO_DCITIONARY_COLUMN_VALUE) {
- compareResult = ByteUtil.UnsafeComparer.INSTANCE
- .compareTo(first.getDictionaryKeys(), dictionaryKeyOffset, eachColumnValueSize[i],
- second.getDictionaryKeys(), dictionaryKeyOffset, eachColumnValueSize[i]);
- dictionaryKeyOffset += eachColumnValueSize[i];
- } else {
- if (processedNoDictionaryColumn > 1) {
- actualOffset = firstNoDictionaryKeyBuffer.getShort(nonDictionaryKeyOffset);
- firstNoDcitionaryLength =
- firstNoDictionaryKeyBuffer.getShort(nonDictionaryKeyOffset + SHORT_SIZE_IN_BYTES);
- secondNodeDictionaryLength =
- secondNoDictionaryKeyBuffer.getShort(nonDictionaryKeyOffset + SHORT_SIZE_IN_BYTES);
- compareResult = ByteUtil.UnsafeComparer.INSTANCE
- .compareTo(first.getNoDictionaryKeys(), actualOffset, firstNoDcitionaryLength,
- second.getNoDictionaryKeys(), actualOffset, secondNodeDictionaryLength);
- nonDictionaryKeyOffset += SHORT_SIZE_IN_BYTES;
- processedNoDictionaryColumn--;
- } else {
- actualOffset = firstNoDictionaryKeyBuffer.getShort(nonDictionaryKeyOffset);
- firstNoDcitionaryLength = first.getNoDictionaryKeys().length - actualOffset;
- secondNodeDictionaryLength = second.getNoDictionaryKeys().length - actualOffset;
- compareResult = ByteUtil.UnsafeComparer.INSTANCE
- .compareTo(first.getNoDictionaryKeys(), actualOffset, firstNoDcitionaryLength,
- second.getNoDictionaryKeys(), actualOffset, secondNodeDictionaryLength);
- }
- }
- if (compareResult != 0) {
- return compareResult;
- }
- }
-
- return 0;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeNode.java b/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeNode.java
deleted file mode 100644
index 9468cb5..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeNode.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.impl.btree;
-
-import org.carbondata.core.carbon.datastore.DataRefNode;
-import org.carbondata.core.carbon.datastore.IndexKey;
-
-/**
- * Interface for btree node
- */
-public interface BTreeNode extends DataRefNode {
-
- /**
- * below method will return the one node indexes
- *
- * @return node entry array
- */
- IndexKey[] getNodeKeys();
-
- /**
- * to check whether node in a btree is a leaf node or not
- *
- * @return leaf node or not
- */
- boolean isLeafNode();
-
- /**
- * below method will be used to set the children of intermediate node
- *
- * @param children array
- */
- void setChildren(BTreeNode[] children);
-
- /**
- * below method will used to set the next node
- *
- * @param nextNode
- */
- void setNextNode(BTreeNode nextNode);
-
- /**
- * Below method is to get the children based on index
- *
- * @param index children index
- * @return btree node
- */
- BTreeNode getChild(int index);
-
- /**
- * below method to set the node entry
- *
- * @param key node entry
- */
- void setKey(IndexKey key);
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeNonLeafNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeNonLeafNode.java b/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeNonLeafNode.java
deleted file mode 100644
index ad49c0b..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BTreeNonLeafNode.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.impl.btree;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.carbondata.core.carbon.datastore.DataRefNode;
-import org.carbondata.core.carbon.datastore.IndexKey;
-import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.carbondata.core.constants.CarbonCommonConstants;
-import org.carbondata.core.datastorage.store.FileHolder;
-
-/**
- * No leaf node of a b+tree class which will keep the matadata(start key) of the
- * leaf node
- */
-public class BTreeNonLeafNode implements BTreeNode {
-
- /**
- * Child nodes
- */
- private BTreeNode[] children;
-
- /**
- * list of keys in non leaf
- */
- private List<IndexKey> listOfKeys;
-
- public BTreeNonLeafNode() {
- // creating a list which will store all the indexes
- listOfKeys = new ArrayList<IndexKey>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
- }
-
- /**
- * below method will return the one node indexes
- *
- * @return getting a complete leaf ]node keys
- */
- @Override public IndexKey[] getNodeKeys() {
- return listOfKeys.toArray(new IndexKey[listOfKeys.size()]);
- }
-
- /**
- * as it is a non leaf node it will have the reference of all the leaf node
- * under it, setting all the children
- *
- * @param leaf nodes
- */
- @Override public void setChildren(BTreeNode[] children) {
- this.children = children;
- }
-
- /**
- * setting the next node
- */
- @Override public void setNextNode(BTreeNode nextNode) {
- // no required in case of non leaf node
- }
-
- /**
- * get the leaf node based on children
- *
- * @return leaf node
- */
- @Override public BTreeNode getChild(int index) {
- return this.children[index];
- }
-
- /**
- * add a key of a leaf node
- *
- * @param leaf node start keys
- */
- @Override public void setKey(IndexKey key) {
- listOfKeys.add(key);
-
- }
-
- /**
- * @return whether its a leaf node or not
- */
- @Override public boolean isLeafNode() {
- return false;
- }
-
- /**
- * Method to get the next block this can be used while scanning when
- * iterator of this class can be used iterate over blocks
- *
- * @return next block
- */
- @Override public DataRefNode getNextDataRefNode() {
- throw new UnsupportedOperationException("Unsupported operation");
- }
-
- /**
- * to get the number of keys tuples present in the block
- *
- * @return number of keys in the block
- */
- @Override public int nodeSize() {
- return listOfKeys.size();
- }
-
- /**
- * Method can be used to get the block index .This can be used when multiple
- * thread can be used scan group of blocks in that can we can assign the
- * some of the blocks to one thread and some to other
- *
- * @return block number
- */
- @Override public long nodeNumber() {
- throw new UnsupportedOperationException("Unsupported operation");
- }
-
- /**
- * This method will be used to get the max value of all the columns this can
- * be used in case of filter query
- *
- * @param max value of all the columns
- */
- @Override public byte[][] getColumnsMaxValue() {
- // operation of getting the max value is not supported as its a non leaf
- // node
- // and in case of B+Tree data will be stored only in leaf node and
- // intermediate
- // node will be used only for searching the leaf node
- throw new UnsupportedOperationException("Unsupported operation");
- }
-
- /**
- * This method will be used to get the max value of all the columns this can
- * be used in case of filter query
- *
- * @param min value of all the columns
- */
- @Override public byte[][] getColumnsMinValue() {
- // operation of getting the min value is not supported as its a non leaf
- // node
- // and in case of B+Tree data will be stored only in leaf node and
- // intermediate
- // node will be used only for searching the leaf node
- throw new UnsupportedOperationException("Unsupported operation");
- }
-
- /**
- * Below method will be used to get the dimension chunks
- *
- * @param fileReader file reader to read the chunks from file
- * @param blockIndexes indexes of the blocks need to be read
- * @return dimension data chunks
- */
- @Override public DimensionColumnDataChunk[] getDimensionChunks(FileHolder fileReader,
- int[] blockIndexes) {
-
- // operation of getting the dimension chunks is not supported as its a
- // non leaf node
- // and in case of B+Tree data will be stored only in leaf node and
- // intermediate
- // node will be used only for searching the leaf node
- throw new UnsupportedOperationException("Unsupported operation");
- }
-
- /**
- * Below method will be used to get the dimension chunk
- *
- * @param fileReader file reader to read the chunk from file
- * @param blockIndex block index to be read
- * @return dimension data chunk
- */
- @Override public DimensionColumnDataChunk getDimensionChunk(FileHolder fileReader,
- int blockIndexes) {
- // operation of getting the dimension chunk is not supported as its a
- // non leaf node
- // and in case of B+Tree data will be stored only in leaf node and
- // intermediate
- // node will be used only for searching the leaf node
- throw new UnsupportedOperationException("Unsupported operation");
- }
-
- /**
- * Below method will be used to get the measure chunk
- *
- * @param fileReader file reader to read the chunk from file
- * @param blockIndexes block indexes to be read from file
- * @return measure column data chunk
- */
- @Override public MeasureColumnDataChunk[] getMeasureChunks(FileHolder fileReader,
- int[] blockIndexes) {
- // operation of getting the measure chunk is not supported as its a non
- // leaf node
- // and in case of B+Tree data will be stored only in leaf node and
- // intermediate
- // node will be used only for searching the leaf node
- throw new UnsupportedOperationException("Unsupported operation");
- }
-
- /**
- * Below method will be used to read the measure chunk
- *
- * @param fileReader file read to read the file chunk
- * @param blockIndex block index to be read from file
- * @return measure data chunk
- */
-
- @Override public MeasureColumnDataChunk getMeasureChunk(FileHolder fileReader, int blockIndex) {
- // operation of getting the measure chunk is not supported as its a non
- // leaf node
- // and in case of B+Tree data will be stored only in leaf node and
- // intermediate
- // node will be used only for searching the leaf node
- throw new UnsupportedOperationException("Unsupported operation");
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockBTreeBuilder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockBTreeBuilder.java b/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockBTreeBuilder.java
deleted file mode 100644
index 7ff3929..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockBTreeBuilder.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.impl.btree;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.carbondata.common.logging.LogService;
-import org.carbondata.common.logging.LogServiceFactory;
-import org.carbondata.core.carbon.datastore.BTreeBuilderInfo;
-import org.carbondata.core.carbon.datastore.IndexKey;
-import org.carbondata.core.constants.CarbonCommonConstants;
-
-/**
- * Below class will be used to build the btree BTree will be built for all the
- * blocks of a segment
- */
-public class BlockBTreeBuilder extends AbstractBTreeBuilder {
-
- /**
- * Attribute for Carbon LOGGER
- */
- private static final LogService LOGGER =
- LogServiceFactory.getLogService(BlockBTreeBuilder.class.getName());
-
- /**
- * Below method will be used to build the segment info bplus tree format
- * Tree will be a read only tree, and it will be build on Bottoms up
- * approach first all the leaf node will be built and then intermediate node
- * in our case one leaf node will have not only one entry it will have group
- * of entries
- */
- @Override public void build(BTreeBuilderInfo btreeBuilderInfo) {
- int groupCounter;
- int nInternal = 0;
- BTreeNode curNode = null;
- BTreeNode prevNode = null;
- List<BTreeNode[]> nodeGroups =
- new ArrayList<BTreeNode[]>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
- BTreeNode[] currentGroup = null;
- List<List<IndexKey>> interNSKeyList =
- new ArrayList<List<IndexKey>>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
- List<IndexKey> leafNSKeyList = null;
- long nodeNumber = 0;
- for (int metadataIndex = 0;
- metadataIndex < btreeBuilderInfo.getFooterList().size(); metadataIndex++) {
- // creating a leaf node
- curNode = new BlockBTreeLeafNode(btreeBuilderInfo, metadataIndex, nodeNumber++);
- nLeaf++;
- // setting a next node as its a b+tree
- // so all the leaf node will be chained
- // will be stored in linked list
- if (prevNode != null) {
- prevNode.setNextNode(curNode);
- }
- prevNode = curNode;
- // as intermediate node will have more than one leaf
- // in cerating a group
- groupCounter = (nLeaf - 1) % (maxNumberOfEntriesInNonLeafNodes);
- if (groupCounter == 0) {
- // Create new node group if current group is full
- leafNSKeyList = new ArrayList<IndexKey>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
- currentGroup = new BTreeNode[maxNumberOfEntriesInNonLeafNodes];
- nodeGroups.add(currentGroup);
- nInternal++;
- interNSKeyList.add(leafNSKeyList);
- }
- if (null != leafNSKeyList) {
- leafNSKeyList.add(convertStartKeyToNodeEntry(
- btreeBuilderInfo.getFooterList().get(metadataIndex).getBlockletIndex()
- .getBtreeIndex().getStartKey()));
- }
- if (null != currentGroup) {
- currentGroup[groupCounter] = curNode;
- }
- }
- if (nLeaf == 0) {
- return;
- }
- // adding a intermediate node
- addIntermediateNode(curNode, nodeGroups, currentGroup, interNSKeyList, nInternal);
- LOGGER.info("************************Total Number Rows In BTREE: " + nLeaf);
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockBTreeLeafNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockBTreeLeafNode.java b/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockBTreeLeafNode.java
deleted file mode 100644
index 6b63961..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockBTreeLeafNode.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.impl.btree;
-
-import org.carbondata.core.carbon.datastore.BTreeBuilderInfo;
-import org.carbondata.core.carbon.datastore.block.TableBlockInfo;
-import org.carbondata.core.carbon.metadata.blocklet.DataFileFooter;
-import org.carbondata.core.carbon.metadata.blocklet.index.BlockletMinMaxIndex;
-
-/**
- * Leaf node for btree where only min max will be store this can be used from
- * driver when only we need to find whether particular block be selected for
- * query execution
- */
-public class BlockBTreeLeafNode extends AbstractBTreeLeafNode {
-
- private TableBlockInfo blockInfo;
-
- /**
- * Create a leaf node
- *
- * @param builderInfos builder infos which have required metadata to create a leaf
- * node
- * @param leafIndex leaf node index
- * @param metadataIndex metadata index
- */
- public BlockBTreeLeafNode(BTreeBuilderInfo builderInfos, int metadataIndex, long nodeNumber) {
- DataFileFooter footer = builderInfos.getFooterList().get(metadataIndex);
- BlockletMinMaxIndex minMaxIndex = footer.getBlockletIndex().getMinMaxIndex();
- maxKeyOfColumns = minMaxIndex.getMaxValues();
- minKeyOfColumns = minMaxIndex.getMinValues();
- numberOfKeys = 1;
- this.nodeNumber = nodeNumber;
- this.blockInfo = footer.getTableBlockInfo();
- }
-
- /**
- * Below method is to get the table block info
- * This will be used only in case of BlockBtree leaf node which will
- * be used to from driver
- *
- * @return TableBlockInfo
- */
- public TableBlockInfo getTableBlockInfo() {
- return blockInfo;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockletBTreeBuilder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockletBTreeBuilder.java b/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockletBTreeBuilder.java
deleted file mode 100644
index 0cc62f7..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockletBTreeBuilder.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.impl.btree;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.carbondata.common.logging.LogService;
-import org.carbondata.common.logging.LogServiceFactory;
-import org.carbondata.core.carbon.datastore.BTreeBuilderInfo;
-import org.carbondata.core.carbon.datastore.IndexKey;
-import org.carbondata.core.constants.CarbonCommonConstants;
-
-/**
- * Btree based builder which will build the leaf node in a b+ tree format
- */
-public class BlockletBTreeBuilder extends AbstractBTreeBuilder {
-
- /**
- * Attribute for Carbon LOGGER
- */
- private static final LogService LOGGER =
- LogServiceFactory.getLogService(BlockletBTreeBuilder.class.getName());
-
- /**
- * Below method will be used to build the segment info bplus tree format
- * Tree will be a read only tree, and it will be build on Bottoms up approach
- * first all the leaf node will be built and then intermediate node
- * in our case one leaf node will have not only one entry it will have group of entries
- */
- @Override public void build(BTreeBuilderInfo segmentBuilderInfos) {
- long totalNumberOfTuple = 0;
- int groupCounter;
- int nInternal = 0;
- BTreeNode curNode = null;
- BTreeNode prevNode = null;
- List<BTreeNode[]> nodeGroups =
- new ArrayList<BTreeNode[]>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
- BTreeNode[] currentGroup = null;
- List<List<IndexKey>> interNSKeyList =
- new ArrayList<List<IndexKey>>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
- List<IndexKey> leafNSKeyList = null;
- long nodeNumber = 0;
- for (int index = 0;
- index < segmentBuilderInfos.getFooterList().get(0).getBlockletList()
- .size(); index++) {
- // creating a leaf node
- curNode = new BlockletBTreeLeafNode(segmentBuilderInfos, index, nodeNumber++);
- totalNumberOfTuple +=
- segmentBuilderInfos.getFooterList().get(0).getBlockletList().get(index)
- .getNumberOfRows();
- nLeaf++;
- // setting a next node as its a b+tree
- // so all the leaf node will be chained
- // will be stored in linked list
- if (prevNode != null) {
- prevNode.setNextNode(curNode);
- }
- prevNode = curNode;
- // as intermediate node will have more than one leaf
- // in cerating a group
- groupCounter = (nLeaf - 1) % (maxNumberOfEntriesInNonLeafNodes);
- if (groupCounter == 0) {
- // Create new node group if current group is full
- leafNSKeyList = new ArrayList<IndexKey>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
- currentGroup = new BTreeNode[maxNumberOfEntriesInNonLeafNodes];
- nodeGroups.add(currentGroup);
- nInternal++;
- interNSKeyList.add(leafNSKeyList);
- }
- if (null != leafNSKeyList) {
- leafNSKeyList.add(convertStartKeyToNodeEntry(
- segmentBuilderInfos.getFooterList().get(0).getBlockletList().get(index)
- .getBlockletIndex().getBtreeIndex().getStartKey()));
- }
- if (null != currentGroup) {
- currentGroup[groupCounter] = curNode;
- }
- }
- if (totalNumberOfTuple == 0) {
- return;
- }
- // adding a intermediate node
- addIntermediateNode(curNode, nodeGroups, currentGroup, interNSKeyList, nInternal);
- LOGGER.info("****************************Total Number Rows In BTREE: " + totalNumberOfTuple);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockletBTreeLeafNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockletBTreeLeafNode.java b/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockletBTreeLeafNode.java
deleted file mode 100644
index 12cadf4..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/impl/btree/BlockletBTreeLeafNode.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.datastore.impl.btree;
-
-import org.carbondata.core.carbon.datastore.BTreeBuilderInfo;
-import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.carbondata.core.carbon.datastore.chunk.reader.DimensionColumnChunkReader;
-import org.carbondata.core.carbon.datastore.chunk.reader.MeasureColumnChunkReader;
-import org.carbondata.core.carbon.datastore.chunk.reader.dimension.CompressedDimensionChunkFileBasedReader;
-import org.carbondata.core.carbon.datastore.chunk.reader.measure.CompressedMeasureChunkFileBasedReader;
-import org.carbondata.core.carbon.metadata.blocklet.index.BlockletMinMaxIndex;
-import org.carbondata.core.datastorage.store.FileHolder;
-import org.carbondata.core.datastorage.store.compression.ValueCompressionModel;
-import org.carbondata.core.util.CarbonUtil;
-
-/**
- * Leaf node class of a Blocklet btree
- */
-public class BlockletBTreeLeafNode extends AbstractBTreeLeafNode {
-
- /**
- * reader for dimension chunk
- */
- private DimensionColumnChunkReader dimensionChunksReader;
-
- /**
- * reader of measure chunk
- */
- private MeasureColumnChunkReader measureColumnChunkReader;
-
- /**
- * Create a leaf node
- *
- * @param builderInfos builder infos which have required metadata to create a leaf node
- * @param leafIndex leaf node index
- * @param nodeNumber node number of the node
- * this will be used during query execution when we can
- * give some leaf node of a btree to one executor some to other
- */
- public BlockletBTreeLeafNode(BTreeBuilderInfo builderInfos, int leafIndex, long nodeNumber) {
- // get a lead node min max
- BlockletMinMaxIndex minMaxIndex =
- builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex)
- .getBlockletIndex().getMinMaxIndex();
- // max key of the columns
- maxKeyOfColumns = minMaxIndex.getMaxValues();
- // min keys of the columns
- minKeyOfColumns = minMaxIndex.getMinValues();
- // number of keys present in the leaf
- numberOfKeys = builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex)
- .getNumberOfRows();
- // create a instance of dimension chunk
- dimensionChunksReader = new CompressedDimensionChunkFileBasedReader(
- builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex)
- .getDimensionColumnChunk(), builderInfos.getDimensionColumnValueSize(),
- builderInfos.getFooterList().get(0).getTableBlockInfo().getFilePath());
- // get the value compression model which was used to compress the measure values
- ValueCompressionModel valueCompressionModel = CarbonUtil.getValueCompressionModel(
- builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex)
- .getMeasureColumnChunk());
- // create a instance of measure column chunk reader
- measureColumnChunkReader = new CompressedMeasureChunkFileBasedReader(
- builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex)
- .getMeasureColumnChunk(), valueCompressionModel,
- builderInfos.getFooterList().get(0).getTableBlockInfo().getFilePath());
- this.nodeNumber = nodeNumber;
- }
-
- /**
- * Below method will be used to get the dimension chunks
- *
- * @param fileReader file reader to read the chunks from file
- * @param blockIndexes indexes of the blocks need to be read
- * @return dimension data chunks
- */
- @Override public DimensionColumnDataChunk[] getDimensionChunks(FileHolder fileReader,
- int[] blockIndexes) {
- return dimensionChunksReader.readDimensionChunks(fileReader, blockIndexes);
- }
-
- /**
- * Below method will be used to get the dimension chunk
- *
- * @param fileReader file reader to read the chunk from file
- * @param blockIndex block index to be read
- * @return dimension data chunk
- */
- @Override public DimensionColumnDataChunk getDimensionChunk(FileHolder fileReader,
- int blockIndex) {
- return dimensionChunksReader.readDimensionChunk(fileReader, blockIndex);
- }
-
- /**
- * Below method will be used to get the measure chunk
- *
- * @param fileReader file reader to read the chunk from file
- * @param blockIndexes block indexes to be read from file
- * @return measure column data chunk
- */
- @Override public MeasureColumnDataChunk[] getMeasureChunks(FileHolder fileReader,
- int[] blockIndexes) {
- return measureColumnChunkReader.readMeasureChunks(fileReader, blockIndexes);
- }
-
- /**
- * Below method will be used to read the measure chunk
- *
- * @param fileReader file read to read the file chunk
- * @param blockIndex block index to be read from file
- * @return measure data chunk
- */
- @Override public MeasureColumnDataChunk getMeasureChunk(FileHolder fileReader, int blockIndex) {
- return measureColumnChunkReader.readMeasureChunk(fileReader, blockIndex);
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/metadata/CarbonMetadata.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/metadata/CarbonMetadata.java b/core/src/main/java/org/carbondata/core/carbon/metadata/CarbonMetadata.java
deleted file mode 100644
index 11f159b..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/metadata/CarbonMetadata.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.metadata;
-
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.carbondata.core.carbon.metadata.schema.table.CarbonTable;
-import org.carbondata.core.carbon.metadata.schema.table.TableInfo;
-import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
-
-/**
- * Class which persist the information about the tables present the carbon schemas
- */
-public final class CarbonMetadata {
-
- /**
- * meta data instance
- */
- private static final CarbonMetadata CARBONMETADATAINSTANCE = new CarbonMetadata();
-
- /**
- * holds the list of tableInfo currently present
- */
- private Map<String, CarbonTable> tableInfoMap;
-
- private CarbonMetadata() {
- // creating a concurrent map as it will be updated by multiple thread
- tableInfoMap = new ConcurrentHashMap<String, CarbonTable>();
- }
-
- public static CarbonMetadata getInstance() {
- return CARBONMETADATAINSTANCE;
- }
-
- /**
- * removed the table information
- *
- * @param tableUniquName
- */
- public void removeTable(String tableUniquName) {
- tableInfoMap.remove(convertToLowerCase(tableUniquName));
- }
-
- /**
- * Below method will be used to set the carbon table
- * This method will be used in executor side as driver will always have
- * updated table so from driver during query execution and data loading
- * we just need to add the table
- *
- * @param carbonTable
- */
- public void addCarbonTable(CarbonTable carbonTable) {
- tableInfoMap.put(convertToLowerCase(carbonTable.getTableUniqueName()), carbonTable);
- }
-
- /**
- * method load the table
- *
- * @param tableInfo
- */
- public void loadTableMetadata(TableInfo tableInfo) {
- CarbonTable carbonTable = tableInfoMap.get(convertToLowerCase(tableInfo.getTableUniqueName()));
- if (null == carbonTable || carbonTable.getTableLastUpdatedTime() < tableInfo
- .getLastUpdatedTime()) {
- carbonTable = new CarbonTable();
- carbonTable.loadCarbonTable(tableInfo);
- tableInfoMap.put(convertToLowerCase(tableInfo.getTableUniqueName()), carbonTable);
- }
- }
-
- /**
- * Below method to get the loaded carbon table
- *
- * @param tableUniqueName
- * @return
- */
- public CarbonTable getCarbonTable(String tableUniqueName) {
- return tableInfoMap.get(convertToLowerCase(tableUniqueName));
- }
-
- /**
- * @return the number of tables present in the schema
- */
- public int getNumberOfTables() {
- return tableInfoMap.size();
- }
-
- /**
- * returns the given string in lowercase
- * @param table
- * @return
- */
- public String convertToLowerCase(String table) {
- return table.toLowerCase();
- }
-
- /**
- * method will return dimension instance based on the column identifier
- * and table instance passed to it.
- *
- * @param carbonTable
- * @param columnIdentifier
- * @return CarbonDimension instance
- */
- public CarbonDimension getCarbonDimensionBasedOnColIdentifier(CarbonTable carbonTable,
- String columnIdentifier) {
- List<CarbonDimension> listOfCarbonDims =
- carbonTable.getDimensionByTableName(carbonTable.getFactTableName());
- for (CarbonDimension dimension : listOfCarbonDims) {
- if (dimension.getColumnId().equals(columnIdentifier)) {
- return dimension;
- }
- if (dimension.numberOfChild() > 0) {
- CarbonDimension childDim =
- getCarbonChildDimsBasedOnColIdentifier(columnIdentifier, dimension);
- if (null != childDim) {
- return childDim;
- }
- }
- }
- return null;
- }
-
- /**
- * Below method will be used to get the dimension based on column identifier
- * for complex dimension children
- *
- * @param columnIdentifier column identifier
- * @param dimension parent dimension
- * @return children dimension
- */
- private CarbonDimension getCarbonChildDimsBasedOnColIdentifier(String columnIdentifier,
- CarbonDimension dimension) {
- for (int i = 0; i < dimension.numberOfChild(); i++) {
- if (dimension.getListOfChildDimensions().get(i).getColumnId().equals(columnIdentifier)) {
- return dimension.getListOfChildDimensions().get(i);
- } else if (dimension.getListOfChildDimensions().get(i).numberOfChild() > 0) {
- CarbonDimension childDim = getCarbonChildDimsBasedOnColIdentifier(columnIdentifier,
- dimension.getListOfChildDimensions().get(i));
- if (null != childDim) {
- return childDim;
- }
- }
- }
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/BlockletInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/BlockletInfo.java b/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/BlockletInfo.java
deleted file mode 100644
index de998a9..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/BlockletInfo.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.core.carbon.metadata.blocklet;
-
-import java.io.Serializable;
-import java.util.List;
-
-import org.carbondata.core.carbon.metadata.blocklet.datachunk.DataChunk;
-import org.carbondata.core.carbon.metadata.blocklet.index.BlockletIndex;
-
-/**
- * class to store the information about the blocklet
- */
-public class BlockletInfo implements Serializable {
-
- /**
- * serialization id
- */
- private static final long serialVersionUID = 1873135459695635381L;
-
- /**
- * Number of rows in this blocklet
- */
- private int numberOfRows;
-
- /**
- * Information about dimension chunk of all dimensions in this blocklet
- */
- private List<DataChunk> dimensionColumnChunk;
-
- /**
- * Information about measure chunk of all measures in this blocklet
- */
- private List<DataChunk> measureColumnChunk;
-
- /**
- * to store the index like min max and start and end key of each column of the blocklet
- */
- private BlockletIndex blockletIndex;
-
- /**
- * @return the numberOfRows
- */
- public int getNumberOfRows() {
- return numberOfRows;
- }
-
- /**
- * @param numberOfRows the numberOfRows to set
- */
- public void setNumberOfRows(int numberOfRows) {
- this.numberOfRows = numberOfRows;
- }
-
- /**
- * @return the dimensionColumnChunk
- */
- public List<DataChunk> getDimensionColumnChunk() {
- return dimensionColumnChunk;
- }
-
- /**
- * @param dimensionColumnChunk the dimensionColumnChunk to set
- */
- public void setDimensionColumnChunk(List<DataChunk> dimensionColumnChunk) {
- this.dimensionColumnChunk = dimensionColumnChunk;
- }
-
- /**
- * @return the measureColumnChunk
- */
- public List<DataChunk> getMeasureColumnChunk() {
- return measureColumnChunk;
- }
-
- /**
- * @param measureColumnChunk the measureColumnChunk to set
- */
- public void setMeasureColumnChunk(List<DataChunk> measureColumnChunk) {
- this.measureColumnChunk = measureColumnChunk;
- }
-
- /**
- * @return the blockletIndex
- */
- public BlockletIndex getBlockletIndex() {
- return blockletIndex;
- }
-
- /**
- * @param blockletIndex the blockletIndex to set
- */
- public void setBlockletIndex(BlockletIndex blockletIndex) {
- this.blockletIndex = blockletIndex;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/DataFileFooter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/DataFileFooter.java b/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/DataFileFooter.java
deleted file mode 100644
index 94d8e8b..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/DataFileFooter.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.metadata.blocklet;
-
-import java.io.Serializable;
-import java.util.List;
-
-import org.carbondata.core.carbon.datastore.block.TableBlockInfo;
-import org.carbondata.core.carbon.metadata.blocklet.index.BlockletIndex;
-import org.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema;
-
-/**
- * Information of one data file
- */
-public class DataFileFooter implements Serializable {
-
- /**
- * serialization id
- */
- private static final long serialVersionUID = -7284319972734500751L;
-
- /**
- * version used for data compatibility
- */
- private int versionId;
-
- /**
- * total number of rows in this file
- */
- private long numberOfRows;
-
- /**
- * Segment info (will be same/repeated for all block in this segment)
- */
- private SegmentInfo segmentInfo;
-
- /**
- * Information about leaf nodes of all columns in this file
- */
- private List<BlockletInfo> blockletList;
-
- /**
- * blocklet index of all blocklets in this file
- */
- private BlockletIndex blockletIndex;
-
- /**
- * Description of columns in this file
- */
- private List<ColumnSchema> columnInTable;
-
- /**
- * to store the block info detail like file name block index and locations
- */
- private TableBlockInfo tableBlockInfo;
-
- /**
- * @return the versionId
- */
- public int getVersionId() {
- return versionId;
- }
-
- /**
- * @param versionId the versionId to set
- */
- public void setVersionId(int versionId) {
- this.versionId = versionId;
- }
-
- /**
- * @return the numberOfRows
- */
- public long getNumberOfRows() {
- return numberOfRows;
- }
-
- /**
- * @param numberOfRows the numberOfRows to set
- */
- public void setNumberOfRows(long numberOfRows) {
- this.numberOfRows = numberOfRows;
- }
-
- /**
- * @return the segmentInfo
- */
- public SegmentInfo getSegmentInfo() {
- return segmentInfo;
- }
-
- /**
- * @param segmentInfo the segmentInfo to set
- */
- public void setSegmentInfo(SegmentInfo segmentInfo) {
- this.segmentInfo = segmentInfo;
- }
-
- /**
- * @return the List of Blocklet
- */
- public List<BlockletInfo> getBlockletList() {
- return blockletList;
- }
-
- /**
- * @param blockletList the blockletList to set
- */
- public void setBlockletList(List<BlockletInfo> blockletList) {
- this.blockletList = blockletList;
- }
-
- /**
- * @return the blockletIndex
- */
- public BlockletIndex getBlockletIndex() {
- return blockletIndex;
- }
-
- /**
- * @param blockletIndex the blockletIndex to set
- */
- public void setBlockletIndex(BlockletIndex blockletIndex) {
- this.blockletIndex = blockletIndex;
- }
-
- /**
- * @return the columnInTable
- */
- public List<ColumnSchema> getColumnInTable() {
- return columnInTable;
- }
-
- /**
- * @param columnInTable the columnInTable to set
- */
- public void setColumnInTable(List<ColumnSchema> columnInTable) {
- this.columnInTable = columnInTable;
- }
-
- /**
- * @return the tableBlockInfo
- */
- public TableBlockInfo getTableBlockInfo() {
- return tableBlockInfo;
- }
-
- /**
- * @param tableBlockInfo the tableBlockInfo to set
- */
- public void setTableBlockInfo(TableBlockInfo tableBlockInfo) {
- this.tableBlockInfo = tableBlockInfo;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/SegmentInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/SegmentInfo.java b/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/SegmentInfo.java
deleted file mode 100644
index a69e061..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/SegmentInfo.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.core.carbon.metadata.blocklet;
-
-import java.io.Serializable;
-
-/**
- * Class holds the information about the segment information
- */
-public class SegmentInfo implements Serializable {
-
- /**
- * serialization version
- */
- private static final long serialVersionUID = -1749874611112709431L;
-
- /**
- * number of column in the segment
- */
- private int numberOfColumns;
-
- /**
- * cardinality of each columns
- * column which is not participating in the multidimensional key cardinality will be -1;
- */
- private int[] columnCardinality;
-
- /**
- * @return the numberOfColumns
- */
- public int getNumberOfColumns() {
- return numberOfColumns;
- }
-
- /**
- * @param numberOfColumns the numberOfColumns to set
- */
- public void setNumberOfColumns(int numberOfColumns) {
- this.numberOfColumns = numberOfColumns;
- }
-
- /**
- * @return the columnCardinality
- */
- public int[] getColumnCardinality() {
- return columnCardinality;
- }
-
- /**
- * @param columnCardinality the columnCardinality to set
- */
- public void setColumnCardinality(int[] columnCardinality) {
- this.columnCardinality = columnCardinality;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/compressor/ChunkCompressorMeta.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/compressor/ChunkCompressorMeta.java b/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/compressor/ChunkCompressorMeta.java
deleted file mode 100644
index bcfd76e..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/compressor/ChunkCompressorMeta.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.core.carbon.metadata.blocklet.compressor;
-
-import java.io.Serializable;
-
-/**
- * Represents the compression information of data of one dimension
- * one dimension group in one blocklet
- */
-public class ChunkCompressorMeta implements Serializable {
-
- /**
- * serialization version
- */
- private static final long serialVersionUID = -6697087170420991140L;
-
- /**
- * data chunk compressor
- */
- private CompressionCodec compressor;
-
- /**
- * total byte size of all uncompressed pages in this column chunk (including the headers)
- */
- private long uncompressedSize;
-
- /**
- * total byte size of all compressed pages in this column chunk (including the headers)
- */
- private long compressedSize;
-
- /**
- * @return the compressor
- */
- public CompressionCodec getCompressorCodec() {
- return compressor;
- }
-
- /**
- * @param compressor the compressor to set
- */
- public void setCompressor(CompressionCodec compressor) {
- this.compressor = compressor;
- }
-
- /**
- * @return the uncompressedSize
- */
- public long getUncompressedSize() {
- return uncompressedSize;
- }
-
- /**
- * @param uncompressedSize the uncompressedSize to set
- */
- public void setUncompressedSize(long uncompressedSize) {
- this.uncompressedSize = uncompressedSize;
- }
-
- /**
- * @return the compressedSize
- */
- public long getCompressedSize() {
- return compressedSize;
- }
-
- /**
- * @param compressedSize the compressedSize to set
- */
- public void setCompressedSize(long compressedSize) {
- this.compressedSize = compressedSize;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/compressor/CompressionCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/compressor/CompressionCodec.java b/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/compressor/CompressionCodec.java
deleted file mode 100644
index 76d2ddb..0000000
--- a/core/src/main/java/org/carbondata/core/carbon/metadata/blocklet/compressor/CompressionCodec.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.core.carbon.metadata.blocklet.compressor;
-
-/**
- * Compressions supported by Carbon Data.
- */
-public enum CompressionCodec {
-
- /**
- * snappy compression
- */
- SNAPPY,
-}