Posted to commits@carbondata.apache.org by ch...@apache.org on 2016/06/30 17:42:36 UTC

[49/50] [abbrv] incubator-carbondata git commit: Resolving compilation issues after merge

Resolving compilation issues after merge


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/4444c324
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/4444c324
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/4444c324

Branch: refs/heads/master
Commit: 4444c324183d91a5b0e44617cd3e5d5d60a00fc0
Parents: 7f72218
Author: ravipesala <ra...@gmail.com>
Authored: Thu Jun 30 19:59:51 2016 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Thu Jun 30 19:59:51 2016 +0530

----------------------------------------------------------------------
 .../spark/merger/RowResultMerger.java           |  1 -
 .../carbondata/spark/load/CarbonLoaderUtil.java | 27 +++++++++----
 .../spark/merger/CarbonDataMergerUtil.java      |  2 +-
 .../spark/sql/CarbonDictionaryDecoder.scala     | 10 +++--
 .../org/apache/spark/sql/CarbonSqlParser.scala  | 12 +++---
 .../execution/command/carbonTableSchema.scala   | 19 ++++-----
 .../spark/sql/hive/CarbonStrategies.scala       | 41 +++++++++++++++++++-
 .../datacompaction/DataCompactionLockTest.scala | 10 ++---
 .../ColumnGroupDataTypesTestCase.scala          | 12 +++---
 .../processing/mdkeygen/MDKeyGenStep.java       |  1 -
 .../store/writer/AbstractFactDataWriter.java    |  4 +-
 11 files changed, 93 insertions(+), 46 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java
index 617efd2..54e32a0 100644
--- a/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java
+++ b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java
@@ -39,7 +39,6 @@ import org.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema;
 import org.carbondata.core.carbon.path.CarbonStorePath;
 import org.carbondata.core.carbon.path.CarbonTablePath;
 import org.carbondata.core.constants.CarbonCommonConstants;
-import org.carbondata.core.datastorage.store.columnar.ColumnGroupModel;
 import org.carbondata.core.keygenerator.KeyGenException;
 import org.carbondata.core.util.ByteUtil;
 import org.carbondata.core.util.CarbonUtil;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/integration/spark/src/main/java/org/carbondata/spark/load/CarbonLoaderUtil.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/spark/load/CarbonLoaderUtil.java b/integration/spark/src/main/java/org/carbondata/spark/load/CarbonLoaderUtil.java
index 220d1b7..af880f5 100644
--- a/integration/spark/src/main/java/org/carbondata/spark/load/CarbonLoaderUtil.java
+++ b/integration/spark/src/main/java/org/carbondata/spark/load/CarbonLoaderUtil.java
@@ -18,9 +18,23 @@
  */
 package org.carbondata.spark.load;
 
-import java.io.*;
+import java.io.BufferedWriter;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import org.carbondata.common.logging.LogService;
 import org.carbondata.common.logging.LogServiceFactory;
@@ -42,18 +56,18 @@ import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
 import org.carbondata.core.carbon.path.CarbonStorePath;
 import org.carbondata.core.carbon.path.CarbonTablePath;
 import org.carbondata.core.constants.CarbonCommonConstants;
-import org.carbondata.core.datastorage.store.fileperations.AtomicFileOperations;
-import org.carbondata.core.datastorage.store.fileperations.AtomicFileOperationsImpl;
-import org.carbondata.core.datastorage.store.fileperations.FileWriteOperation;
 import org.carbondata.core.datastorage.store.filesystem.CarbonFile;
 import org.carbondata.core.datastorage.store.filesystem.CarbonFileFilter;
 import org.carbondata.core.datastorage.store.impl.FileFactory;
 import org.carbondata.core.datastorage.store.impl.FileFactory.FileType;
 import org.carbondata.core.load.LoadMetadataDetails;
-import org.carbondata.core.locks.ICarbonLock;
 import org.carbondata.core.util.CarbonProperties;
 import org.carbondata.core.util.CarbonUtil;
 import org.carbondata.core.util.CarbonUtilException;
+import org.carbondata.lcm.fileoperations.AtomicFileOperations;
+import org.carbondata.lcm.fileoperations.AtomicFileOperationsImpl;
+import org.carbondata.lcm.fileoperations.FileWriteOperation;
+import org.carbondata.lcm.locks.ICarbonLock;
 import org.carbondata.lcm.status.SegmentStatusManager;
 import org.carbondata.processing.api.dataloader.DataLoadModel;
 import org.carbondata.processing.api.dataloader.SchemaInfo;
@@ -67,7 +81,6 @@ import org.carbondata.spark.merger.NodeBlockRelation;
 import org.carbondata.spark.merger.NodeMultiBlockRelation;
 
 import com.google.gson.Gson;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
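
The wildcard expansion above is mostly a checkstyle matter, but explicit imports also guard against silent ambiguity: two wildcard imports can export the same simple name and break the build later. A sketch of the classic case, written here in Scala terms with java.util.Date vs java.sql.Date (illustrative only, not part of this commit):

    // Both packages export a `Date`, so after two wildcard imports the
    // bare name is ambiguous and the compile fails at the use site.
    import java.util._
    import java.sql._

    object ImportClashSketch {
      // val d: Date = new Date(0L)  // does not compile: reference to Date is ambiguous
      val d: java.util.Date = new java.util.Date(0L)  // fully qualified, unambiguous
    }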

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/integration/spark/src/main/java/org/carbondata/spark/merger/CarbonDataMergerUtil.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/spark/merger/CarbonDataMergerUtil.java b/integration/spark/src/main/java/org/carbondata/spark/merger/CarbonDataMergerUtil.java
index 3aa66c2..f71d7b4 100644
--- a/integration/spark/src/main/java/org/carbondata/spark/merger/CarbonDataMergerUtil.java
+++ b/integration/spark/src/main/java/org/carbondata/spark/merger/CarbonDataMergerUtil.java
@@ -44,9 +44,9 @@ import org.carbondata.core.datastorage.store.filesystem.CarbonFile;
 import org.carbondata.core.datastorage.store.filesystem.CarbonFileFilter;
 import org.carbondata.core.datastorage.store.impl.FileFactory;
 import org.carbondata.core.load.LoadMetadataDetails;
-import org.carbondata.core.locks.ICarbonLock;
 import org.carbondata.core.util.CarbonProperties;
 import org.carbondata.integration.spark.merger.CompactionType;
+import org.carbondata.lcm.locks.ICarbonLock;
 import org.carbondata.lcm.status.SegmentStatusManager;
 import org.carbondata.spark.load.CarbonLoadModel;
 import org.carbondata.spark.load.CarbonLoaderUtil;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
index 8a9f1c9..ab8b297 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
@@ -198,9 +198,13 @@ case class CarbonDictionaryDecoder(
       cache: Cache[DictionaryColumnUniqueIdentifier, Dictionary]) = {
     val dicts: Seq[Dictionary] = getDictionaryColumnIds.map { f =>
       if (f._2 != null) {
-        cache.get(new DictionaryColumnUniqueIdentifier(
-          atiMap.get(f._1).get.getCarbonTableIdentifier,
-          f._2, f._3))
+        try {
+          cache.get(new DictionaryColumnUniqueIdentifier(
+            atiMap.get(f._1).get.getCarbonTableIdentifier,
+            f._2, f._3))
+        } catch {
+          case _ => null
+        }
       } else {
         null
       }
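
One note on the hunk above: a bare `case _` in a Scala catch block matches every Throwable, including fatal JVM errors, and later compilers warn about exactly this pattern. A minimal sketch of a narrower equivalent, with DictCache and lookup as illustrative stand-ins for the Carbon cache API rather than its real signatures:

    import scala.util.Try

    object DictLookupSketch {
      // Stand-in for the Carbon Cache interface; `get` may throw on a
      // missing or unreadable dictionary.
      trait DictCache[K, V] { def get(key: K): V }

      def lookup[K, V >: Null](cache: DictCache[K, V], key: K): V =
        Try(cache.get(key))  // Try captures only NonFatal throwables
          .getOrElse(null)   // same null fallback the commit uses on a failed lookup
    }

Because `Try` wraps only NonFatal exceptions, an OutOfMemoryError still propagates instead of being turned into a null dictionary.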

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
index 406b025..8536544 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import java.nio.charset.Charset
-import java.util
 import java.util.regex.{Matcher, Pattern}
 
 import scala.collection.JavaConverters._
@@ -33,7 +31,7 @@ import org.apache.spark.sql.catalyst.{SqlLexical, _}
 import org.apache.spark.sql.catalyst.analysis._
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.trees.CurrentOrigin
-import org.apache.spark.sql.execution.command.{DimensionRelation, _}
+import org.apache.spark.sql.execution.command._
 import org.apache.spark.sql.execution.datasources.DescribeCommand
 import org.apache.spark.sql.hive.HiveQlWrapper
 
@@ -601,8 +599,8 @@ class CarbonSqlParser()
   }
 
   protected def extractColumnProperties(fields: Seq[Field], tableProperties: Map[String, String]):
-  util.Map[String, util.List[ColumnProperty]] = {
-    val colPropMap = new util.HashMap[String, util.List[ColumnProperty]]()
+  java.util.Map[String, java.util.List[ColumnProperty]] = {
+    val colPropMap = new java.util.HashMap[String, java.util.List[ColumnProperty]]()
     fields.foreach { field =>
       if (field.children.isDefined && field.children.get != null) {
         fillAllChildrenColumnProperty(field.column, field.children, tableProperties, colPropMap)
@@ -615,7 +613,7 @@ class CarbonSqlParser()
 
   protected def fillAllChildrenColumnProperty(parent: String, fieldChildren: Option[List[Field]],
     tableProperties: Map[String, String],
-    colPropMap: util.HashMap[String, util.List[ColumnProperty]]) {
+    colPropMap: java.util.HashMap[String, java.util.List[ColumnProperty]]) {
     fieldChildren.foreach(fields => {
       fields.foreach(field => {
         fillColumnProperty(Some(parent), field.column, tableProperties, colPropMap)
@@ -628,7 +626,7 @@ class CarbonSqlParser()
   protected def fillColumnProperty(parentColumnName: Option[String],
     columnName: String,
     tableProperties: Map[String, String],
-    colPropMap: util.HashMap[String, util.List[ColumnProperty]]) {
+    colPropMap: java.util.HashMap[String, java.util.List[ColumnProperty]]) {
     val (tblPropKey, colProKey) = getKey(parentColumnName, columnName)
     val colProps = CommonUtil.getColumnProperties(tblPropKey, tableProperties)
     if (None != colProps) {
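
For context on the `util.Map` to `java.util.Map` rewrite above: once `import java.util` is dropped, a bare `util` prefix inside `org.apache.spark.sql` resolves through package nesting to `org.apache.spark.util` (or to `scala.util` via the default imports), neither of which holds these collection types, hence the fully qualified names. A sketch of the shadowing, with illustrative package names:

    package org.example.outer {
      package util { class Thing }          // plays the role of org.apache.spark.util
      package inner {
        object Demo {
          // `util.Map` here would mean org.example.outer.util.Map (no such type),
          // so java.util collections must be written out in full:
          val props = new java.util.HashMap[String, java.util.List[String]]()
        }
      }
    }

A rename import such as `import java.{util => ju}` would be the other common fix.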

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index b8afcdf..4a6551b 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.command
 import java.io.File
 import java.text.SimpleDateFormat
 import java.util
-import java.util.{Date, UUID}
+import java.util.UUID
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
@@ -30,10 +30,8 @@ import scala.util.Random
 import org.apache.spark.SparkEnv
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
-import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
-import org.apache.spark.sql.catalyst.util.DateTimeUtils
-import org.apache.spark.sql.catalyst.util.DateTimeUtils.SQLTimestamp
+import org.apache.spark.sql.catalyst.TableIdentifier._
+import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Cast, Literal}
 import org.apache.spark.sql.execution.{RunnableCommand, SparkPlan}
 import org.apache.spark.sql.hive.HiveContext
 import org.apache.spark.sql.types.TimestampType
@@ -48,20 +46,19 @@ import org.carbondata.core.carbon.metadata.datatype.DataType
 import org.carbondata.core.carbon.metadata.encoder.Encoding
 import org.carbondata.core.carbon.metadata.schema.{SchemaEvolution, SchemaEvolutionEntry}
 import org.carbondata.core.carbon.metadata.schema.table.{CarbonTable, TableInfo, TableSchema}
-import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension
-import org.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema
+import org.carbondata.core.carbon.metadata.schema.table.column.{CarbonDimension, ColumnSchema}
 import org.carbondata.core.constants.CarbonCommonConstants
 import org.carbondata.core.datastorage.store.impl.FileFactory
 import org.carbondata.core.util.{CarbonProperties, CarbonUtil}
 import org.carbondata.integration.spark.merger.CompactionType
 import org.carbondata.lcm.locks.{CarbonLockFactory, LockUsage}
 import org.carbondata.lcm.status.SegmentStatusManager
+import org.carbondata.spark.CarbonSparkFactory
 import org.carbondata.spark.exception.MalformedCarbonCommandException
 import org.carbondata.spark.load._
 import org.carbondata.spark.partition.api.impl.QueryPartitionHelper
 import org.carbondata.spark.rdd.CarbonDataRDDFactory
-import org.carbondata.spark.util.{CarbonScalaUtil, CommonUtil, GlobalDictionaryUtil}
-import org.carbondata.spark.CarbonSparkFactory
+import org.carbondata.spark.util.{CarbonScalaUtil, GlobalDictionaryUtil}
 
 
 case class tableModel(
@@ -1568,7 +1565,7 @@ private[sql] case class LoadTable(
       catch {
         case ex: Exception =>
           LOGGER.error(ex)
-          LOGGER.audit(s"Dataload failure for $schemaName.$tableName. Please check the logs")
+          LOGGER.audit(s"Dataload failure for $dbName.$tableName. Please check the logs")
           throw ex
       }
       finally {
@@ -1583,7 +1580,7 @@ private[sql] case class LoadTable(
         } catch {
           case ex: Exception =>
             LOGGER.error(ex)
-            LOGGER.audit(s"Dataload failure for $schemaName.$tableName. " +
+            LOGGER.audit(s"Dataload failure for $dbName.$tableName. " +
               "Problem deleting the partition folder")
             throw ex
         }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala
index 1821475..1de8908 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.hive
 
+import java.util
+
 import scala.collection.JavaConverters._
 
 import org.apache.spark.sql._
@@ -32,6 +34,7 @@ import org.apache.spark.sql.execution.command._
 import org.apache.spark.sql.execution.datasources.{DescribeCommand => LogicalDescribeCommand, LogicalRelation}
 import org.apache.spark.sql.hive.execution.{DescribeHiveTableCommand, DropTable, HiveNativeCommand}
 import org.apache.spark.sql.optimizer.{CarbonAliasDecoderRelation, CarbonDecoderRelation}
+import org.apache.spark.sql.types.IntegerType
 
 import org.carbondata.common.logging.LogServiceFactory
 import org.carbondata.spark.exception.MalformedCarbonCommandException
@@ -138,7 +141,11 @@ class CarbonStrategies(sqlContext: SQLContext) extends QueryPlanner[SparkPlan] {
         predicates,
         useUnsafeCoversion = false)(sqlContext)
       projectExprsNeedToDecode.addAll(scan.attributesNeedToDecode)
-      if (projectExprsNeedToDecode.size() > 0) {
+      val updatedAttrs = scan.attributesRaw.map(attr =>
+        updateDataType(attr.asInstanceOf[AttributeReference], relation, projectExprsNeedToDecode))
+      scan.attributesRaw = updatedAttrs
+      if (projectExprsNeedToDecode.size() > 0
+          && isDictionaryEncoded(projectExprsNeedToDecode.asScala.toSeq, relation)) {
         val decoder = getCarbonDecoder(logicalRelation,
           sc,
           tableName,
@@ -151,7 +158,12 @@ class CarbonStrategies(sqlContext: SQLContext) extends QueryPlanner[SparkPlan] {
           decoder
         }
       } else {
-        scan
+        if (scan.unprocessedExprs.nonEmpty) {
+          val filterCondToAdd = scan.unprocessedExprs.reduceLeftOption(expressions.And)
+          filterCondToAdd.map(Filter(_, scan)).getOrElse(scan)
+        } else {
+          scan
+        }
       }
     }
 
@@ -174,6 +186,31 @@ class CarbonStrategies(sqlContext: SQLContext) extends QueryPlanner[SparkPlan] {
         CarbonAliasDecoderRelation(), scan)(sc)
     }
 
+    def isDictionaryEncoded(projectExprsNeedToDecode: Seq[Attribute],
+        relation: CarbonDatasourceRelation): Boolean = {
+      var isEncoded = false
+      projectExprsNeedToDecode.foreach { attr =>
+        if (relation.carbonRelation.metaData.dictionaryMap.get(attr.name).getOrElse(false)) {
+          isEncoded = true
+        }
+      }
+      isEncoded
+    }
+
+    def updateDataType(attr: AttributeReference,
+        relation: CarbonDatasourceRelation,
+        allAttrsNotDecode: util.Set[Attribute]): AttributeReference = {
+      if (relation.carbonRelation.metaData.dictionaryMap.get(attr.name).getOrElse(false) &&
+        !allAttrsNotDecode.asScala.exists(p => p.name.equals(attr.name))) {
+        AttributeReference(attr.name,
+          IntegerType,
+          attr.nullable,
+          attr.metadata)(attr.exprId, attr.qualifiers)
+      } else {
+        attr
+      }
+    }
+
     private def isStarQuery(plan: LogicalPlan) = {
       plan match {
         case LogicalFilter(condition,
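
Two things are going on in the strategy changes above. `updateDataType` retypes dictionary-encoded dimensions that will not be decoded as IntegerType, because the scan hands back integer surrogate keys for those columns rather than their actual values. And any predicates Carbon could not evaluate itself (`unprocessedExprs`) are folded back into a Spark Filter node via `reduceLeftOption(expressions.And)`, so the residual conditions are still applied above the scan. A minimal sketch of that fold, with plain predicate functions standing in for Catalyst expressions:

    // Int => Boolean functions stand in for Catalyst Expression;
    // the Option result stands in for the optionally added Filter node.
    object ResidualFilterSketch extends App {
      type Pred = Int => Boolean

      def residualFilter(unprocessed: Seq[Pred]): Option[Pred] =
        unprocessed.reduceLeftOption((l, r) => x => l(x) && r(x))

      val combined = residualFilter(Seq(_ > 0, _ % 2 == 0))
      assert(combined.exists(f => f(4)))   // 4 satisfies both predicates
      assert(combined.exists(f => !f(3)))  // 3 fails the evenness check
      assert(residualFilter(Nil).isEmpty)  // no residue: keep the bare scan
      println("residual filter fold behaves as expected")
    }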

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionLockTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionLockTest.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionLockTest.scala
index e121214..368c83b 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionLockTest.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionLockTest.scala
@@ -2,18 +2,18 @@ package org.carbondata.spark.testsuite.datacompaction
 
 import java.io.File
 
-import org.apache.spark.sql.Row
+import scala.collection.JavaConverters._
+
 import org.apache.spark.sql.common.util.CarbonHiveContext._
 import org.apache.spark.sql.common.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
 import org.carbondata.core.carbon.path.{CarbonStorePath, CarbonTablePath}
 import org.carbondata.core.carbon.{AbsoluteTableIdentifier, CarbonTableIdentifier}
 import org.carbondata.core.constants.CarbonCommonConstants
-import org.carbondata.core.locks.{CarbonLockFactory, ICarbonLock, LockUsage}
 import org.carbondata.core.util.CarbonProperties
+import org.carbondata.lcm.locks.{CarbonLockFactory, ICarbonLock, LockUsage}
 import org.carbondata.lcm.status.SegmentStatusManager
-import org.scalatest.BeforeAndAfterAll
-
-import scala.collection.JavaConverters._
 
 /**
   * FT for data compaction Locking scenario.

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/ColumnGroupDataTypesTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/ColumnGroupDataTypesTestCase.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/ColumnGroupDataTypesTestCase.scala
index 8098308..c20e18d 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/ColumnGroupDataTypesTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/ColumnGroupDataTypesTestCase.scala
@@ -47,37 +47,37 @@ class ColumnGroupDataTypesTestCase extends QueryTest with BeforeAndAfterAll {
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from colgrp"),
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from normal"))
   }
-  
+
   test("select all dimension query with filter on columnar") {
     checkAnswer(
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from colgrp where column1='column1666'"),
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from normal where column1='column1666'"))
   }
-  
+
   test("select all dimension query with filter on column group dimension") {
     checkAnswer(
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from colgrp where column3='column311'"),
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from normal where column3='column311'"))
   }
-  
+
   test("select all dimension query with filter on two dimension from different column group") {
     checkAnswer(
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from colgrp where column3='column311' and column7='column74' "),
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from normal where column3='column311' and column7='column74'"))
   }
-  
+
   test("select all dimension query with filter on two dimension from same column group") {
     checkAnswer(
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from colgrp where column3='column311' and column4='column42' "),
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from normal where column3='column311' and column4='column42'"))
   }
-  
+
   test("select all dimension query with filter on two dimension one from column group other from columnar") {
     checkAnswer(
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from colgrp where column3='column311' and column5='column516' "),
       sql("select column1,column2,column3,column4,column5,column6,column7,column8,column9,column10 from normal where column3='column311' and column5='column516'"))
   }
-  
+
   test("select few dimension") {
     checkAnswer(
       sql("select column1,column3,column4,column5,column6,column9,column10 from colgrp"),

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/processing/src/main/java/org/carbondata/processing/mdkeygen/MDKeyGenStep.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/carbondata/processing/mdkeygen/MDKeyGenStep.java b/processing/src/main/java/org/carbondata/processing/mdkeygen/MDKeyGenStep.java
index 3dd64be..de35082 100644
--- a/processing/src/main/java/org/carbondata/processing/mdkeygen/MDKeyGenStep.java
+++ b/processing/src/main/java/org/carbondata/processing/mdkeygen/MDKeyGenStep.java
@@ -39,7 +39,6 @@ import org.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema;
 import org.carbondata.core.carbon.path.CarbonStorePath;
 import org.carbondata.core.carbon.path.CarbonTablePath;
 import org.carbondata.core.constants.CarbonCommonConstants;
-import org.carbondata.core.datastorage.store.columnar.ColumnGroupModel;
 import org.carbondata.core.keygenerator.KeyGenException;
 import org.carbondata.core.util.CarbonProperties;
 import org.carbondata.core.util.CarbonUtil;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/4444c324/processing/src/main/java/org/carbondata/processing/store/writer/AbstractFactDataWriter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/carbondata/processing/store/writer/AbstractFactDataWriter.java b/processing/src/main/java/org/carbondata/processing/store/writer/AbstractFactDataWriter.java
index f1c7ad5..e9e1a56 100644
--- a/processing/src/main/java/org/carbondata/processing/store/writer/AbstractFactDataWriter.java
+++ b/processing/src/main/java/org/carbondata/processing/store/writer/AbstractFactDataWriter.java
@@ -54,8 +54,6 @@ import org.carbondata.core.carbon.path.CarbonTablePath;
 import org.carbondata.core.constants.CarbonCommonConstants;
 import org.carbondata.core.datastorage.store.filesystem.CarbonFile;
 import org.carbondata.core.datastorage.store.impl.FileFactory;
-import org.carbondata.core.file.manager.composite.FileData;
-import org.carbondata.core.file.manager.composite.IFileManagerComposite;
 import org.carbondata.core.metadata.BlockletInfoColumnar;
 import org.carbondata.core.util.ByteUtil;
 import org.carbondata.core.util.CarbonMergerUtil;
@@ -67,6 +65,8 @@ import org.carbondata.core.writer.CarbonIndexFileWriter;
 import org.carbondata.format.BlockIndex;
 import org.carbondata.format.FileFooter;
 import org.carbondata.format.IndexHeader;
+import org.carbondata.processing.mdkeygen.file.FileData;
+import org.carbondata.processing.mdkeygen.file.IFileManagerComposite;
 import org.carbondata.processing.store.CarbonDataFileAttributes;
 import org.carbondata.processing.store.writer.exception.CarbonDataWriterException;