You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@carbondata.apache.org by "Akash R Nilugal (Jira)" <ji...@apache.org> on 2020/02/06 09:41:00 UTC
[jira] [Updated] (CARBONDATA-3664) Alter set sort columns is
changing schema but evolution entry is not made
[ https://issues.apache.org/jira/browse/CARBONDATA-3664?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Akash R Nilugal updated CARBONDATA-3664:
----------------------------------------
Description:
Alter table "set sort columns" changes the table schema, but the corresponding schema-evolution entry is not added.
/**
 * Loads the datamaps for the given table in parallel by delegating to an
 * executor-backed loader job.
 *
 * @param carbonTable        table whose datamaps are to be loaded
 * @param dataMapExprWrapper wrapper around the datamap filter expression
 * @param validSegments      segments whose datamaps should be loaded
 * @param partitionsToPrune  partition specs used for pruning during the load
 * @throws IOException if valid/invalid segment information cannot be resolved
 */
public static void loadDataMaps(CarbonTable carbonTable, DataMapExprWrapper dataMapExprWrapper,
    List<Segment> validSegments, List<PartitionSpec> partitionsToPrune) throws IOException {
  boolean distributedPruning = CarbonProperties.getInstance()
      .isDistributedPruningEnabled(carbonTable.getDatabaseName(), carbonTable.getTableName());
  // Parallel loading applies only when distributed pruning is disabled and the
  // table is configured for parallel datamap loading.
  if (!distributedPruning && BlockletDataMapUtil.loadDataMapsParallel(carbonTable)) {
    DataMapJob loaderJob = (DataMapJob) createDataMapJob(
        "org.apache.carbondata.spark.rdd.SparkBlockletDataMapLoaderJob");
    SegmentStatusManager.ValidAndInvalidSegmentsInfo segmentsInfo =
        getValidAndInvalidSegments(carbonTable, FileFactory.getConfiguration());
    // Invalid segments are passed along so the loader can skip / clean them up.
    FileInputFormat loaderFormat = createDataMapJob(carbonTable, dataMapExprWrapper,
        validSegments, segmentsInfo.getInvalidSegments(), partitionsToPrune,
        "org.apache.carbondata.hadoop.DistributableBlockletDataMapLoader", false);
    loaderJob.execute(carbonTable, loaderFormat);
  }
}
/**
 * Reflectively instantiates the datamap loader {@code FileInputFormat} named by
 * {@code clsName}, forwarding the table, filter expression and segment/partition
 * information to its first declared constructor.
 *
 * @return the constructed {@code FileInputFormat} instance
 * @throws RuntimeException wrapping any reflection or instantiation failure,
 *         with the original exception preserved as the cause
 */
private static FileInputFormat createDataMapJob(CarbonTable carbonTable,
    DataMapExprWrapper dataMapExprWrapper, List<Segment> validsegments,
    List<Segment> invalidSegments, List<PartitionSpec> partitionsToPrune, String clsName,
    boolean isJobToClearDataMaps) {
  try {
    // NOTE(review): getDeclaredConstructors() returns constructors in no
    // guaranteed order; this assumes the target class exposes a single
    // matching constructor — confirm against the loader class.
    Constructor<?> constructor = Class.forName(clsName).getDeclaredConstructors()[0];
    Object instance = constructor.newInstance(carbonTable, dataMapExprWrapper, validsegments,
        invalidSegments, partitionsToPrune, isJobToClearDataMaps);
    return (FileInputFormat) instance;
  } catch (Exception e) {
    // Preserve the underlying reflection failure as the cause.
    throw new RuntimeException(e);
  }
}
was:Alter set sort columns is changing schema but evolution entry is not made
> Alter set sort columns is changing schema but evolution entry is not made
> -------------------------------------------------------------------------
>
> Key: CARBONDATA-3664
> URL: https://issues.apache.org/jira/browse/CARBONDATA-3664
> Project: CarbonData
> Issue Type: Bug
> Reporter: Akash R Nilugal
> Assignee: Akash R Nilugal
> Priority: Minor
> Fix For: 2.0.0
>
> Time Spent: 1h 20m
> Remaining Estimate: 0h
>
> Alter set sort columns is changing schema but evolution entry is not made
> /**
> * Loads the datamaps in parallel by utilizing executor
> *
> * @param carbonTable
> * @param dataMapExprWrapper
> * @param validSegments
> * @param partitionsToPrune
> * @throws IOException
> */
> public static void loadDataMaps(CarbonTable carbonTable, DataMapExprWrapper dataMapExprWrapper,
> List<Segment> validSegments, List<PartitionSpec> partitionsToPrune) throws IOException {
> if (!CarbonProperties.getInstance()
> .isDistributedPruningEnabled(carbonTable.getDatabaseName(), carbonTable.getTableName())
> && BlockletDataMapUtil.loadDataMapsParallel(carbonTable)) {
> String clsName = "org.apache.carbondata.spark.rdd.SparkBlockletDataMapLoaderJob";
> DataMapJob dataMapJob = (DataMapJob) createDataMapJob(clsName);
> String className = "org.apache.carbondata.hadoop.DistributableBlockletDataMapLoader";
> SegmentStatusManager.ValidAndInvalidSegmentsInfo validAndInvalidSegmentsInfo =
> getValidAndInvalidSegments(carbonTable, FileFactory.getConfiguration());
> List<Segment> invalidSegments = validAndInvalidSegmentsInfo.getInvalidSegments();
> FileInputFormat dataMapFormat =
> createDataMapJob(carbonTable, dataMapExprWrapper, validSegments, invalidSegments,
> partitionsToPrune, className, false);
> dataMapJob.execute(carbonTable, dataMapFormat);
> }
> }
> private static FileInputFormat createDataMapJob(CarbonTable carbonTable,
> DataMapExprWrapper dataMapExprWrapper, List<Segment> validsegments,
> List<Segment> invalidSegments, List<PartitionSpec> partitionsToPrune, String clsName,
> boolean isJobToClearDataMaps) {
> try {
> Constructor<?> cons = Class.forName(clsName).getDeclaredConstructors()[0];
> return (FileInputFormat) cons
> .newInstance(carbonTable, dataMapExprWrapper, validsegments, invalidSegments,
> partitionsToPrune, isJobToClearDataMaps);
> } catch (Exception e) {
> throw new RuntimeException(e);
> }
> }
--
This message was sent by Atlassian Jira
(v8.3.4#803005)