You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by xu...@apache.org on 2019/06/11 11:45:38 UTC
[carbondata] branch master updated: [CARBONDATA-3411]
[CARBONDATA-3414] Fix clear datamaps logs an exception in SDK
This is an automated email from the ASF dual-hosted git repository.
xubo245 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git
The following commit(s) were added to refs/heads/master by this push:
new e0743fd [CARBONDATA-3411] [CARBONDATA-3414] Fix clear datamaps logs an exception in SDK
e0743fd is described below
commit e0743fde719d2bd7e89ad2eeee89d56fdce72983
Author: ajantha-bhat <aj...@gmail.com>
AuthorDate: Tue Jun 4 11:17:20 2019 +0530
[CARBONDATA-3411] [CARBONDATA-3414] Fix clear datamaps logs an exception in SDK
problem: In SDK, when datamaps are cleared, the exception below is logged
java.io.IOException: File does not exist: ../carbondata/store/sdk/testWriteFiles/771604793030370/Metadata/schema
cause: CarbonTable is required only for launching the job; in SDK there is no need to launch a job, so there is no need to build a carbon table.
solution: build carbon table only when need to launch job.
problem [CARBONDATA-3411]: when an insert into a partition table fails, the exception does not print the reason.
cause: The exception was caught, but the error message was not taken from that exception.
solution: throw the exception directly
This closes #3254
---
.../core/datamap/DataMapStoreManager.java | 49 ++++++++++------------
.../command/management/CarbonLoadDataCommand.scala | 2 +-
.../carbondata/sdk/file/CarbonReaderBuilder.java | 6 ++-
3 files changed, 28 insertions(+), 29 deletions(-)
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index 89402c2..4d235c5 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -511,44 +511,41 @@ public final class DataMapStoreManager {
*/
public void clearDataMaps(AbsoluteTableIdentifier identifier, boolean launchJob) {
String tableUniqueName = identifier.getCarbonTableIdentifier().getTableUniqueName();
- CarbonTable carbonTable = getCarbonTable(identifier);
- if (launchJob && CarbonProperties.getInstance()
- .isDistributedPruningEnabled(identifier.getDatabaseName(), identifier.getTableName())) {
+ if (launchJob) {
+ // carbon table need to lookup only if launch job is set.
+ CarbonTable carbonTable = getCarbonTable(identifier);
if (null != carbonTable) {
- try {
- DataMapUtil.executeClearDataMapJob(carbonTable, DataMapUtil.DISTRIBUTED_JOB_NAME);
- } catch (IOException e) {
- LOGGER.error("clear dataMap job failed", e);
- // ignoring the exception
- }
- }
- } else {
- List<TableDataMap> tableIndices = allDataMaps.get(tableUniqueName);
- if (tableIndices == null) {
- String keyUsingTablePath = getKeyUsingTablePath(identifier.getTablePath());
- if (keyUsingTablePath != null) {
- tableUniqueName = keyUsingTablePath;
+ String jobClassName;
+ if (CarbonProperties.getInstance()
+ .isDistributedPruningEnabled(identifier.getDatabaseName(), identifier.getTableName())) {
+ jobClassName = DataMapUtil.DISTRIBUTED_JOB_NAME;
+ } else {
+ jobClassName = DataMapUtil.EMBEDDED_JOB_NAME;
}
- }
- if (launchJob && null != carbonTable) {
try {
- DataMapUtil.executeClearDataMapJob(carbonTable, DataMapUtil.EMBEDDED_JOB_NAME);
+ DataMapUtil.executeClearDataMapJob(carbonTable, jobClassName);
} catch (IOException e) {
LOGGER.error("clear dataMap job failed", e);
// ignoring the exception
}
}
+ } else {
// remove carbon table from meta cache if launchJob is false as this would be called in
// executor side.
- if (!launchJob) {
- CarbonMetadata.getInstance()
- .removeTable(identifier.getDatabaseName(), identifier.getTableName());
+ CarbonMetadata.getInstance()
+ .removeTable(identifier.getDatabaseName(), identifier.getTableName());
+ }
+ List<TableDataMap> tableIndices = allDataMaps.get(tableUniqueName);
+ if (tableIndices == null) {
+ String keyUsingTablePath = getKeyUsingTablePath(identifier.getTablePath());
+ if (keyUsingTablePath != null) {
+ tableUniqueName = keyUsingTablePath;
}
- segmentRefreshMap.remove(identifier.uniqueName());
- clearDataMaps(tableUniqueName);
- allDataMaps.remove(tableUniqueName);
- tablePathMap.remove(tableUniqueName);
}
+ segmentRefreshMap.remove(identifier.uniqueName());
+ clearDataMaps(tableUniqueName);
+ allDataMaps.remove(tableUniqueName);
+ tablePathMap.remove(tableUniqueName);
}
/**
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
index 24a4aac..5225360 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
@@ -847,7 +847,7 @@ case class CarbonLoadDataCommand(
}
LOGGER.info(errorMessage)
LOGGER.error(ex)
- throw new Exception(errorMessage)
+ throw ex
} finally {
CarbonSession.threadUnset("partition.operationcontext")
if (isOverwriteTable) {
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index 3ba7835..9a686bd 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -359,7 +359,8 @@ public class CarbonReaderBuilder {
} catch (Exception ex) {
// Clear the datamap cache as it can get added in getSplits() method
DataMapStoreManager.getInstance().clearDataMaps(
- format.getOrCreateCarbonTable((job.getConfiguration())).getAbsoluteTableIdentifier());
+ format.getOrCreateCarbonTable((job.getConfiguration())).getAbsoluteTableIdentifier(),
+ false);
throw ex;
}
}
@@ -417,7 +418,8 @@ public class CarbonReaderBuilder {
if (format != null) {
// Clear the datamap cache as it is added in getSplits() method
DataMapStoreManager.getInstance().clearDataMaps(
- format.getOrCreateCarbonTable((job.getConfiguration())).getAbsoluteTableIdentifier());
+ format.getOrCreateCarbonTable((job.getConfiguration())).getAbsoluteTableIdentifier(),
+ false);
}
}
return splits.toArray(new InputSplit[splits.size()]);