You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ch...@apache.org on 2016/11/09 09:08:25 UTC
[1/2] incubator-carbondata git commit: CARBONDATA-383 Rename the
hdfsStoreLocation/hdfsStorePath parameters, since Carbon supports more
than just HDFS paths
Repository: incubator-carbondata
Updated Branches:
refs/heads/master 97377afae -> b4c3e5115
CARBONDATA-383 Rename the hdfsStoreLocation/hdfsStorePath parameters, since Carbon supports more than just HDFS paths
Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/a65ca7cc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/a65ca7cc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/a65ca7cc
Branch: refs/heads/master
Commit: a65ca7ccb1dbe240c32ede1d0bc01f1e427c424b
Parents: 97377af
Author: hexiaoqiao <he...@meituan.com>
Authored: Wed Nov 9 14:41:05 2016 +0800
Committer: hexiaoqiao <he...@meituan.com>
Committed: Wed Nov 9 16:19:13 2016 +0800
----------------------------------------------------------------------
.../CarbonDictionaryMetadataReaderImpl.java | 12 ++--
.../core/reader/CarbonDictionaryReaderImpl.java | 14 ++--
.../CarbonDictionarySortIndexReaderImpl.java | 2 +-
.../core/writer/CarbonDictionaryWriterImpl.java | 14 ++--
...CarbonDictionarySortIndexReaderImplTest.java | 16 ++---
...CarbonDictionarySortIndexWriterImplTest.java | 14 ++--
.../carbondata/spark/load/CarbonLoaderUtil.java | 4 +-
.../carbondata/spark/CarbonSparkFactory.scala | 2 +-
.../spark/DictionaryDetailHelper.scala | 4 +-
.../spark/rdd/CarbonDataLoadRDD.scala | 16 ++---
.../spark/rdd/CarbonDataRDDFactory.scala | 70 ++++++++++----------
.../spark/rdd/CarbonDeleteLoadByDateRDD.scala | 2 +-
.../carbondata/spark/rdd/CarbonMergerRDD.scala | 8 +--
.../apache/carbondata/spark/rdd/Compactor.scala | 7 +-
.../execution/command/carbonTableSchema.scala | 4 +-
15 files changed, 93 insertions(+), 96 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
index 8c03100..c79c15d 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
@@ -44,9 +44,9 @@ public class CarbonDictionaryMetadataReaderImpl implements CarbonDictionaryMetad
protected CarbonTableIdentifier carbonTableIdentifier;
/**
- * HDFS store path
+ * carbon dictionary meta data store path
*/
- protected String hdfsStorePath;
+ protected String storePath;
/**
* column identifier
@@ -66,13 +66,13 @@ public class CarbonDictionaryMetadataReaderImpl implements CarbonDictionaryMetad
/**
* Constructor
*
- * @param hdfsStorePath HDFS store path
+ * @param storePath carbon dictionary meta data store path
* @param carbonTableIdentifier table identifier which will give table name and database name
* @param columnIdentifier column unique identifier
*/
- public CarbonDictionaryMetadataReaderImpl(String hdfsStorePath,
+ public CarbonDictionaryMetadataReaderImpl(String storePath,
CarbonTableIdentifier carbonTableIdentifier, ColumnIdentifier columnIdentifier) {
- this.hdfsStorePath = hdfsStorePath;
+ this.storePath = storePath;
this.carbonTableIdentifier = carbonTableIdentifier;
this.columnIdentifier = columnIdentifier;
initFileLocation();
@@ -157,7 +157,7 @@ public class CarbonDictionaryMetadataReaderImpl implements CarbonDictionaryMetad
protected void initFileLocation() {
PathService pathService = CarbonCommonFactory.getPathService();
CarbonTablePath carbonTablePath =
- pathService.getCarbonTablePath(this.hdfsStorePath, carbonTableIdentifier);
+ pathService.getCarbonTablePath(this.storePath, carbonTableIdentifier);
this.columnDictionaryMetadataFilePath =
carbonTablePath.getDictionaryMetaFilePath(columnIdentifier.getColumnId());
}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java
index 0e1bf6d..bf475f5 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java
@@ -47,9 +47,9 @@ public class CarbonDictionaryReaderImpl implements CarbonDictionaryReader {
protected CarbonTableIdentifier carbonTableIdentifier;
/**
- * HDFS store path
+ * carbon dictionary data store path
*/
- protected String hdfsStorePath;
+ protected String storePath;
/**
* column name
@@ -69,13 +69,13 @@ public class CarbonDictionaryReaderImpl implements CarbonDictionaryReader {
/**
* Constructor
*
- * @param hdfsStorePath HDFS store path
+ * @param storePath carbon dictionary data store path
* @param carbonTableIdentifier table identifier which will give table name and database name
* @param columnIdentifier column unique identifier
*/
- public CarbonDictionaryReaderImpl(String hdfsStorePath,
+ public CarbonDictionaryReaderImpl(String storePath,
CarbonTableIdentifier carbonTableIdentifier, ColumnIdentifier columnIdentifier) {
- this.hdfsStorePath = hdfsStorePath;
+ this.storePath = storePath;
this.carbonTableIdentifier = carbonTableIdentifier;
this.columnIdentifier = columnIdentifier;
initFileLocation();
@@ -223,7 +223,7 @@ public class CarbonDictionaryReaderImpl implements CarbonDictionaryReader {
protected void initFileLocation() {
PathService pathService = CarbonCommonFactory.getPathService();
CarbonTablePath carbonTablePath = pathService.getCarbonTablePath(
- this.hdfsStorePath, carbonTableIdentifier);
+ this.storePath, carbonTableIdentifier);
this.columnDictionaryFilePath = carbonTablePath
.getDictionaryFilePath(columnIdentifier.getColumnId());
}
@@ -308,7 +308,7 @@ public class CarbonDictionaryReaderImpl implements CarbonDictionaryReader {
* @return
*/
protected CarbonDictionaryMetadataReader getDictionaryMetadataReader() {
- return new CarbonDictionaryMetadataReaderImpl(this.hdfsStorePath, carbonTableIdentifier,
+ return new CarbonDictionaryMetadataReaderImpl(this.storePath, carbonTableIdentifier,
this.columnIdentifier);
}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java b/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
index c573f91..00ae688 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
@@ -53,7 +53,7 @@ public class CarbonDictionarySortIndexReaderImpl implements CarbonDictionarySort
protected ColumnIdentifier columnIdentifier;
/**
- * hdfs store location
+ * store location
*/
protected String carbonStorePath;
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
index 31f022d..a23ba75 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
@@ -81,9 +81,9 @@ public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
protected ColumnIdentifier columnIdentifier;
/**
- * HDFS store path
+ * carbon dictionary data store path
*/
- protected String hdfsStorePath;
+ protected String storePath;
/**
* dictionary file path
@@ -131,15 +131,15 @@ public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
/**
* Constructor
*
- * @param hdfsStorePath HDFS store path
+ * @param storePath carbon dictionary data store path
* @param carbonTableIdentifier table identifier which will give table name and database name
* @param columnIdentifier column unique identifier
*/
- public CarbonDictionaryWriterImpl(String hdfsStorePath,
+ public CarbonDictionaryWriterImpl(String storePath,
CarbonTableIdentifier carbonTableIdentifier, ColumnIdentifier columnIdentifier) {
this.carbonTableIdentifier = carbonTableIdentifier;
this.columnIdentifier = columnIdentifier;
- this.hdfsStorePath = hdfsStorePath;
+ this.storePath = storePath;
this.isFirstTime = true;
}
@@ -253,7 +253,7 @@ public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
protected void initPaths() {
PathService pathService = CarbonCommonFactory.getPathService();
CarbonTablePath carbonTablePath = pathService.getCarbonTablePath(
- this.hdfsStorePath, carbonTableIdentifier);
+ this.storePath, carbonTableIdentifier);
this.dictionaryFilePath = carbonTablePath.getDictionaryFilePath(columnIdentifier.getColumnId());
this.dictionaryMetaFilePath =
carbonTablePath.getDictionaryMetaFilePath(columnIdentifier.getColumnId());
@@ -400,7 +400,7 @@ public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
* @return
*/
protected CarbonDictionaryMetadataReader getDictionaryMetadataReader() {
- return new CarbonDictionaryMetadataReaderImpl(hdfsStorePath, carbonTableIdentifier,
+ return new CarbonDictionaryMetadataReaderImpl(storePath, carbonTableIdentifier,
columnIdentifier);
}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/core/src/test/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImplTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImplTest.java b/core/src/test/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImplTest.java
index de5cc1a..700edc4 100644
--- a/core/src/test/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImplTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImplTest.java
@@ -43,10 +43,10 @@ import org.junit.Test;
*
*/
public class CarbonDictionarySortIndexReaderImplTest {
- private String hdfsStorePath;
+ private String storePath;
@Before public void setUp() throws Exception {
- hdfsStorePath = "target/carbonStore";
+ storePath = "target/carbonStore";
}
@After public void tearDown() throws Exception {
@@ -64,12 +64,12 @@ public class CarbonDictionarySortIndexReaderImplTest {
CarbonTableIdentifier carbonTableIdentifier = new CarbonTableIdentifier("testSchema", "carbon",
UUID.randomUUID().toString());
ColumnIdentifier columnIdentifier = new ColumnIdentifier("Name", null, null);
- CarbonDictionaryWriter dictionaryWriter = new CarbonDictionaryWriterImpl(hdfsStorePath,
+ CarbonDictionaryWriter dictionaryWriter = new CarbonDictionaryWriterImpl(storePath,
carbonTableIdentifier, columnIdentifier);
- String metaFolderPath =hdfsStorePath+File.separator+carbonTableIdentifier.getDatabaseName()+File.separator+carbonTableIdentifier.getTableName()+File.separator+"Metadata";
+ String metaFolderPath =storePath+File.separator+carbonTableIdentifier.getDatabaseName()+File.separator+carbonTableIdentifier.getTableName()+File.separator+"Metadata";
CarbonUtil.checkAndCreateFolder(metaFolderPath);
CarbonDictionarySortIndexWriter dictionarySortIndexWriter =
- new CarbonDictionarySortIndexWriterImpl(carbonTableIdentifier, columnIdentifier, hdfsStorePath);
+ new CarbonDictionarySortIndexWriterImpl(carbonTableIdentifier, columnIdentifier, storePath);
List<int[]> expectedData = prepareExpectedData();
int[] data = expectedData.get(0);
for(int i=0;i<data.length;i++) {
@@ -83,7 +83,7 @@ public class CarbonDictionarySortIndexReaderImplTest {
dictionarySortIndexWriter.writeInvertedSortIndex(invertedSortIndex);
dictionarySortIndexWriter.close();
CarbonDictionarySortIndexReader dictionarySortIndexReader =
- new CarbonDictionarySortIndexReaderImpl(carbonTableIdentifier, columnIdentifier, hdfsStorePath);
+ new CarbonDictionarySortIndexReaderImpl(carbonTableIdentifier, columnIdentifier, storePath);
List<Integer> actualSortIndex = dictionarySortIndexReader.readSortIndex();
List<Integer> actualInvertedSortIndex = dictionarySortIndexReader.readInvertedSortIndex();
for (int i = 0; i < actualSortIndex.size(); i++) {
@@ -111,8 +111,8 @@ public class CarbonDictionarySortIndexReaderImplTest {
* this method will delete the store path
*/
private void deleteStorePath() {
- FileFactory.FileType fileType = FileFactory.getFileType(this.hdfsStorePath);
- CarbonFile carbonFile = FileFactory.getCarbonFile(this.hdfsStorePath, fileType);
+ FileFactory.FileType fileType = FileFactory.getFileType(this.storePath);
+ CarbonFile carbonFile = FileFactory.getCarbonFile(this.storePath, fileType);
deleteRecursiveSilent(carbonFile);
}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/core/src/test/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImplTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImplTest.java b/core/src/test/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImplTest.java
index fbdf49a..30289e4 100644
--- a/core/src/test/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImplTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImplTest.java
@@ -44,10 +44,10 @@ import org.junit.Test;
*/
public class CarbonDictionarySortIndexWriterImplTest {
- private String hdfsStorePath;
+ private String storePath;
@Before public void setUp() throws Exception {
- hdfsStorePath = "target/carbonStore";
+ storePath = "target/carbonStore";
}
@After public void tearDown() throws Exception {
@@ -62,13 +62,12 @@ public class CarbonDictionarySortIndexWriterImplTest {
* @throws Exception
*/
@Test public void write() throws Exception {
- String storePath = hdfsStorePath;
CarbonTableIdentifier carbonTableIdentifier = new CarbonTableIdentifier("testSchema", "carbon", UUID.randomUUID().toString());
ColumnIdentifier columnIdentifier = new ColumnIdentifier("Name", null, null);
- String metaFolderPath =hdfsStorePath+File.separator+carbonTableIdentifier.getDatabaseName()+File.separator+carbonTableIdentifier.getTableName()+File.separator+"Metadata";
+ String metaFolderPath =storePath+File.separator+carbonTableIdentifier.getDatabaseName()+File.separator+carbonTableIdentifier.getTableName()+File.separator+"Metadata";
CarbonUtil.checkAndCreateFolder(metaFolderPath);
- CarbonDictionaryWriter dictionaryWriter = new CarbonDictionaryWriterImpl(hdfsStorePath,
+ CarbonDictionaryWriter dictionaryWriter = new CarbonDictionaryWriterImpl(storePath,
carbonTableIdentifier, columnIdentifier);
CarbonDictionarySortIndexWriter dictionarySortIndexWriter =
new CarbonDictionarySortIndexWriterImpl(carbonTableIdentifier, columnIdentifier, storePath);
@@ -100,7 +99,6 @@ public class CarbonDictionarySortIndexWriterImplTest {
* @throws Exception
*/
@Test public void writingEmptyValue() throws Exception {
- String storePath = hdfsStorePath;
CarbonTableIdentifier carbonTableIdentifier = new CarbonTableIdentifier("testSchema", "carbon", UUID.randomUUID().toString());
ColumnIdentifier columnIdentifier = new ColumnIdentifier("Name", null, null);
@@ -135,8 +133,8 @@ public class CarbonDictionarySortIndexWriterImplTest {
* this method will delete the store path
*/
private void deleteStorePath() {
- FileFactory.FileType fileType = FileFactory.getFileType(this.hdfsStorePath);
- CarbonFile carbonFile = FileFactory.getCarbonFile(this.hdfsStorePath, fileType);
+ FileFactory.FileType fileType = FileFactory.getFileType(this.storePath);
+ CarbonFile carbonFile = FileFactory.getCarbonFile(this.storePath, fileType);
deleteRecursiveSilent(carbonFile);
}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/integration/spark/src/main/java/org/apache/carbondata/spark/load/CarbonLoaderUtil.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/apache/carbondata/spark/load/CarbonLoaderUtil.java b/integration/spark/src/main/java/org/apache/carbondata/spark/load/CarbonLoaderUtil.java
index ce67e66..fa796ab 100644
--- a/integration/spark/src/main/java/org/apache/carbondata/spark/load/CarbonLoaderUtil.java
+++ b/integration/spark/src/main/java/org/apache/carbondata/spark/load/CarbonLoaderUtil.java
@@ -150,7 +150,7 @@ public final class CarbonLoaderUtil {
}
public static void executeGraph(CarbonLoadModel loadModel, String storeLocation,
- String hdfsStoreLocation, String kettleHomePath) throws Exception {
+ String storePath, String kettleHomePath) throws Exception {
System.setProperty("KETTLE_HOME", kettleHomePath);
if (!new File(storeLocation).mkdirs()) {
LOGGER.error("Error while creating the temp store path: " + storeLocation);
@@ -162,7 +162,7 @@ public final class CarbonLoaderUtil {
+ CarbonCommonConstants.UNDERSCORE + loadModel.getTaskNo();
CarbonProperties.getInstance().addProperty(tempLocationKey, storeLocation);
CarbonProperties.getInstance()
- .addProperty(CarbonCommonConstants.STORE_LOCATION_HDFS, hdfsStoreLocation);
+ .addProperty(CarbonCommonConstants.STORE_LOCATION_HDFS, storePath);
// CarbonProperties.getInstance().addProperty("store_output_location", outPutLoc);
CarbonProperties.getInstance().addProperty("send.signal.load", "false");
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala
index 1b6c2d0..7618558 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala
@@ -31,7 +31,7 @@ trait ColumnValidator {
*/
trait DictionaryDetailService {
def getDictionaryDetail(dictFolderPath: String, primDimensions: Array[CarbonDimension],
- table: CarbonTableIdentifier, hdfsLocation: String): DictionaryDetail
+ table: CarbonTableIdentifier, storePath: String): DictionaryDetail
}
/**
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/integration/spark/src/main/scala/org/apache/carbondata/spark/DictionaryDetailHelper.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/DictionaryDetailHelper.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/DictionaryDetailHelper.scala
index 77f5895..52457b8 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/spark/DictionaryDetailHelper.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/DictionaryDetailHelper.scala
@@ -26,8 +26,8 @@ import org.apache.carbondata.core.datastorage.store.impl.FileFactory
class DictionaryDetailHelper extends DictionaryDetailService {
def getDictionaryDetail(dictfolderPath: String, primDimensions: Array[CarbonDimension],
- table: CarbonTableIdentifier, hdfsLocation: String): DictionaryDetail = {
- val carbonTablePath = CarbonStorePath.getCarbonTablePath(hdfsLocation, table)
+ table: CarbonTableIdentifier, storePath: String): DictionaryDetail = {
+ val carbonTablePath = CarbonStorePath.getCarbonTablePath(storePath, table)
val dictFilePaths = new Array[String](primDimensions.length)
val dictFileExists = new Array[Boolean](primDimensions.length)
val columnIdentifier = new Array[ColumnIdentifier](primDimensions.length)
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
index 3f411bc..4baeb67 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
@@ -78,7 +78,7 @@ class CarbonNodePartition(rddId: Int, val idx: Int, host: String,
class SparkPartitionLoader(model: CarbonLoadModel,
splitIndex: Int,
- hdfsStoreLocation: String,
+ storePath: String,
kettleHomePath: String,
loadCount: Int,
loadMetadataDetails: LoadMetadataDetails) extends Logging{
@@ -122,7 +122,7 @@ class SparkPartitionLoader(model: CarbonLoadModel,
def run(): Unit = {
try {
- CarbonLoaderUtil.executeGraph(model, storeLocation, hdfsStoreLocation,
+ CarbonLoaderUtil.executeGraph(model, storeLocation, storePath,
kettleHomePath)
} catch {
case e: DataLoadingException => if (e.getErrorCode ==
@@ -166,7 +166,7 @@ class SparkPartitionLoader(model: CarbonLoadModel,
* @param result Output result
* @param carbonLoadModel Carbon load model which contain the load info
* @param storeLocation Tmp store location
- * @param hdfsStoreLocation The store location in hdfs
+ * @param storePath The store location
* @param kettleHomePath The kettle home path
* @param partitioner Partitioner which specify how to partition
* @param columinar whether it is columinar
@@ -183,7 +183,7 @@ class DataFileLoaderRDD[K, V](
result: DataLoadResult[K, V],
carbonLoadModel: CarbonLoadModel,
var storeLocation: String,
- hdfsStoreLocation: String,
+ storePath: String,
kettleHomePath: String,
partitioner: Partitioner,
columinar: Boolean,
@@ -241,7 +241,7 @@ class DataFileLoaderRDD[K, V](
carbonLoadModel.setSegmentId(String.valueOf(loadCount))
setModelAndBlocksInfo()
- val loader = new SparkPartitionLoader(model, theSplit.index, hdfsStoreLocation,
+ val loader = new SparkPartitionLoader(model, theSplit.index, storePath,
kettleHomePath, loadCount, loadMetadataDetails)
loader.initialize
loadMetadataDetails.setLoadStatus(CarbonCommonConstants.STORE_LOADSTATUS_SUCCESS)
@@ -483,7 +483,7 @@ class DataFileLoaderRDD[K, V](
* @param result
* @param carbonLoadModel
* @param storeLocation
- * @param hdfsStoreLocation
+ * @param storePath
* @param kettleHomePath
* @param columinar
* @param loadCount
@@ -498,7 +498,7 @@ class DataFrameLoaderRDD[K, V](
result: DataLoadResult[K, V],
carbonLoadModel: CarbonLoadModel,
var storeLocation: String,
- hdfsStoreLocation: String,
+ storePath: String,
kettleHomePath: String,
columinar: Boolean,
loadCount: Integer,
@@ -522,7 +522,7 @@ class DataFrameLoaderRDD[K, V](
carbonLoadModel.setPartitionId(partitionID)
carbonLoadModel.setSegmentId(String.valueOf(loadCount))
carbonLoadModel.setTaskNo(String.valueOf(theSplit.index))
- val loader = new SparkPartitionLoader(carbonLoadModel, theSplit.index, hdfsStoreLocation,
+ val loader = new SparkPartitionLoader(carbonLoadModel, theSplit.index, storePath,
kettleHomePath, loadCount, loadMetadataDetails)
loader.initialize
loadMetadataDetails.setLoadStatus(CarbonCommonConstants.STORE_LOADSTATUS_SUCCESS)
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index 054dd90..f97bd43 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -65,7 +65,7 @@ object CarbonDataRDDFactory extends Logging {
sqlContext: SQLContext,
carbonLoadModel: CarbonLoadModel,
storeLocation: String,
- hdfsStoreLocation: String,
+ storePath: String,
partitioner: Partitioner) {
val table = CarbonMetadata.getInstance()
.getCarbonTable(carbonLoadModel.getDatabaseName + "_" + carbonLoadModel.getTableName)
@@ -77,7 +77,7 @@ object CarbonDataRDDFactory extends Logging {
schema: CarbonDataLoadSchema,
databaseName: String,
tableName: String,
- hdfsStoreLocation: String,
+ storePath: String,
dateField: String,
dateFieldActualName: String,
dateValue: String,
@@ -101,7 +101,7 @@ object CarbonDataRDDFactory extends Logging {
partitioner,
table.getFactTableName,
tableName,
- hdfsStoreLocation,
+ storePath,
loadMetadataDetailsArray).collect.groupBy(_._1)
var updatedLoadMetadataDetailsList = new ListBuffer[LoadMetadataDetails]()
@@ -196,7 +196,7 @@ object CarbonDataRDDFactory extends Logging {
def alterTableForCompaction(sqlContext: SQLContext,
alterTableModel: AlterTableModel,
- carbonLoadModel: CarbonLoadModel, partitioner: Partitioner, hdfsStoreLocation: String,
+ carbonLoadModel: CarbonLoadModel, partitioner: Partitioner, storePath: String,
kettleHomePath: String, storeLocation: String): Unit = {
var compactionSize: Long = 0
var compactionType: CompactionType = CompactionType.MINOR_COMPACTION
@@ -217,7 +217,7 @@ object CarbonDataRDDFactory extends Logging {
.getTableCreationTime(carbonLoadModel.getDatabaseName, carbonLoadModel.getTableName)
if (null == carbonLoadModel.getLoadMetadataDetails) {
- readLoadMetadataDetails(carbonLoadModel, hdfsStoreLocation)
+ readLoadMetadataDetails(carbonLoadModel, storePath)
}
// reading the start time of data load.
val loadStartTime = CarbonLoaderUtil.readCurrentTime()
@@ -247,7 +247,7 @@ object CarbonDataRDDFactory extends Logging {
handleCompactionForSystemLocking(sqlContext,
carbonLoadModel,
partitioner,
- hdfsStoreLocation,
+ storePath,
kettleHomePath,
storeLocation,
compactionType,
@@ -271,7 +271,7 @@ object CarbonDataRDDFactory extends Logging {
startCompactionThreads(sqlContext,
carbonLoadModel,
partitioner,
- hdfsStoreLocation,
+ storePath,
kettleHomePath,
storeLocation,
compactionModel,
@@ -301,7 +301,7 @@ object CarbonDataRDDFactory extends Logging {
def handleCompactionForSystemLocking(sqlContext: SQLContext,
carbonLoadModel: CarbonLoadModel,
partitioner: Partitioner,
- hdfsStoreLocation: String,
+ storePath: String,
kettleHomePath: String,
storeLocation: String,
compactionType: CompactionType,
@@ -320,7 +320,7 @@ object CarbonDataRDDFactory extends Logging {
startCompactionThreads(sqlContext,
carbonLoadModel,
partitioner,
- hdfsStoreLocation,
+ storePath,
kettleHomePath,
storeLocation,
compactionModel,
@@ -363,7 +363,7 @@ object CarbonDataRDDFactory extends Logging {
}
def executeCompaction(carbonLoadModel: CarbonLoadModel,
- hdfsStoreLocation: String,
+ storePath: String,
compactionModel: CompactionModel,
partitioner: Partitioner,
executor: ExecutorService,
@@ -377,7 +377,7 @@ object CarbonDataRDDFactory extends Logging {
var segList = carbonLoadModel.getLoadMetadataDetails
var loadsToMerge = CarbonDataMergerUtil.identifySegmentsToBeMerged(
- hdfsStoreLocation,
+ storePath,
carbonLoadModel,
partitioner.partitionCount,
compactionModel.compactionSize,
@@ -395,7 +395,7 @@ object CarbonDataRDDFactory extends Logging {
scanSegmentsAndSubmitJob(futureList,
loadsToMerge,
executor,
- hdfsStoreLocation,
+ storePath,
sqlContext,
compactionModel,
kettleHomePath,
@@ -419,7 +419,7 @@ object CarbonDataRDDFactory extends Logging {
// scan again and determine if anything is there to merge again.
- readLoadMetadataDetails(carbonLoadModel, hdfsStoreLocation)
+ readLoadMetadataDetails(carbonLoadModel, storePath)
segList = carbonLoadModel.getLoadMetadataDetails
// in case of major compaction we will scan only once and come out as it will keep
// on doing major for the new loads also.
@@ -430,7 +430,7 @@ object CarbonDataRDDFactory extends Logging {
.filterOutNewlyAddedSegments(carbonLoadModel.getLoadMetadataDetails, lastSegment)
}
loadsToMerge = CarbonDataMergerUtil.identifySegmentsToBeMerged(
- hdfsStoreLocation,
+ storePath,
carbonLoadModel,
partitioner.partitionCount,
compactionModel.compactionSize,
@@ -448,7 +448,7 @@ object CarbonDataRDDFactory extends Logging {
loadsToMerge: util
.List[LoadMetadataDetails],
executor: ExecutorService,
- hdfsStoreLocation: String,
+ storePath: String,
sqlContext: SQLContext,
compactionModel: CompactionModel,
kettleHomePath: String,
@@ -461,7 +461,7 @@ object CarbonDataRDDFactory extends Logging {
}
)
- val compactionCallableModel = CompactionCallableModel(hdfsStoreLocation,
+ val compactionCallableModel = CompactionCallableModel(storePath,
carbonLoadModel,
partitioner,
storeLocation,
@@ -483,14 +483,14 @@ object CarbonDataRDDFactory extends Logging {
def startCompactionThreads(sqlContext: SQLContext,
carbonLoadModel: CarbonLoadModel,
partitioner: Partitioner,
- hdfsStoreLocation: String,
+ storePath: String,
kettleHomePath: String,
storeLocation: String,
compactionModel: CompactionModel,
compactionLock: ICarbonLock): Unit = {
val executor: ExecutorService = Executors.newFixedThreadPool(1)
// update the updated table status.
- readLoadMetadataDetails(carbonLoadModel, hdfsStoreLocation)
+ readLoadMetadataDetails(carbonLoadModel, storePath)
var segList: util.List[LoadMetadataDetails] = carbonLoadModel.getLoadMetadataDetails
// clean up of the stale segments.
@@ -514,7 +514,7 @@ object CarbonDataRDDFactory extends Logging {
var exception : Exception = null
try {
executeCompaction(carbonLoadModel: CarbonLoadModel,
- hdfsStoreLocation: String,
+ storePath: String,
compactionModel: CompactionModel,
partitioner: Partitioner,
executor, sqlContext, kettleHomePath, storeLocation
@@ -550,7 +550,7 @@ object CarbonDataRDDFactory extends Logging {
val compactionType = CarbonCompactionUtil.determineCompactionType(metadataPath)
val newCarbonLoadModel = new CarbonLoadModel()
- prepareCarbonLoadModel(hdfsStoreLocation, table, newCarbonLoadModel)
+ prepareCarbonLoadModel(storePath, table, newCarbonLoadModel)
val tableCreationTime = CarbonEnv.getInstance(sqlContext).carbonCatalog
.getTableCreationTime(newCarbonLoadModel.getDatabaseName,
newCarbonLoadModel.getTableName
@@ -625,7 +625,7 @@ object CarbonDataRDDFactory extends Logging {
compactionThread.run()
}
- def prepareCarbonLoadModel(hdfsStoreLocation: String,
+ def prepareCarbonLoadModel(storePath: String,
table: CarbonTable,
newCarbonLoadModel: CarbonLoadModel): Unit = {
newCarbonLoadModel.setAggTables(table.getAggregateTablesName.asScala.toArray)
@@ -636,7 +636,7 @@ object CarbonDataRDDFactory extends Logging {
newCarbonLoadModel.setTableName(table.getCarbonTableIdentifier.getTableName)
newCarbonLoadModel.setDatabaseName(table.getCarbonTableIdentifier.getDatabaseName)
newCarbonLoadModel.setStorePath(table.getStorePath)
- readLoadMetadataDetails(newCarbonLoadModel, hdfsStoreLocation)
+ readLoadMetadataDetails(newCarbonLoadModel, storePath)
val loadStartTime = CarbonLoaderUtil.readCurrentTime()
newCarbonLoadModel.setFactTimeStamp(loadStartTime)
}
@@ -661,7 +661,7 @@ object CarbonDataRDDFactory extends Logging {
def loadCarbonData(sqlContext: SQLContext,
carbonLoadModel: CarbonLoadModel,
storeLocation: String,
- hdfsStoreLocation: String,
+ storePath: String,
kettleHomePath: String,
partitioner: Partitioner,
columinar: Boolean,
@@ -708,7 +708,7 @@ object CarbonDataRDDFactory extends Logging {
handleCompactionForSystemLocking(sqlContext,
carbonLoadModel,
partitioner,
- hdfsStoreLocation,
+ storePath,
kettleHomePath,
storeLocation,
CompactionType.MINOR_COMPACTION,
@@ -728,7 +728,7 @@ object CarbonDataRDDFactory extends Logging {
startCompactionThreads(sqlContext,
carbonLoadModel,
partitioner,
- hdfsStoreLocation,
+ storePath,
kettleHomePath,
storeLocation,
compactionModel,
@@ -762,10 +762,10 @@ object CarbonDataRDDFactory extends Logging {
.getDatabaseName + "." + carbonLoadModel.getTableName
)
// Check if any load need to be deleted before loading new data
- deleteLoadsAndUpdateMetadata(carbonLoadModel, carbonTable, partitioner, hdfsStoreLocation,
+ deleteLoadsAndUpdateMetadata(carbonLoadModel, carbonTable, partitioner, storePath,
isForceDeletion = false)
if (null == carbonLoadModel.getLoadMetadataDetails) {
- readLoadMetadataDetails(carbonLoadModel, hdfsStoreLocation)
+ readLoadMetadataDetails(carbonLoadModel, storePath)
}
var currentLoadCount = -1
@@ -951,7 +951,7 @@ object CarbonDataRDDFactory extends Logging {
new DataLoadResultImpl(),
carbonLoadModel,
storeLocation,
- hdfsStoreLocation,
+ storePath,
kettleHomePath,
partitioner,
columinar,
@@ -973,7 +973,7 @@ object CarbonDataRDDFactory extends Logging {
new DataLoadResultImpl(),
carbonLoadModel,
storeLocation,
- hdfsStoreLocation,
+ storePath,
kettleHomePath,
columinar,
currentLoadCount,
@@ -982,7 +982,7 @@ object CarbonDataRDDFactory extends Logging {
rdd).collect()
}
- CarbonLoaderUtil.checkAndCreateCarbonDataLocation(hdfsStoreLocation,
+ CarbonLoaderUtil.checkAndCreateCarbonDataLocation(storePath,
carbonLoadModel.getDatabaseName, carbonLoadModel.getTableName,
partitioner.partitionCount, currentLoadCount.toString)
var loadStatus = CarbonCommonConstants.STORE_LOADSTATUS_SUCCESS
@@ -1085,7 +1085,7 @@ object CarbonDataRDDFactory extends Logging {
}
- def readLoadMetadataDetails(model: CarbonLoadModel, hdfsStoreLocation: String): Unit = {
+ def readLoadMetadataDetails(model: CarbonLoadModel, storePath: String): Unit = {
val metadataPath = model.getCarbonDataLoadSchema.getCarbonTable.getMetaDataFilepath
val segmentStatusManager =
new SegmentStatusManager(
@@ -1097,7 +1097,7 @@ object CarbonDataRDDFactory extends Logging {
def deleteLoadsAndUpdateMetadata(
carbonLoadModel: CarbonLoadModel,
table: CarbonTable, partitioner: Partitioner,
- hdfsStoreLocation: String,
+ storePath: String,
isForceDeletion: Boolean) {
if (LoadMetadataUtil.isLoadDeletionRequired(carbonLoadModel)) {
val loadMetadataFilePath = CarbonLoaderUtil
@@ -1111,7 +1111,7 @@ object CarbonDataRDDFactory extends Logging {
// Delete marked loads
val isUpdationRequired = DeleteLoadFolders
- .deleteLoadFoldersFromFileSystem(carbonLoadModel, hdfsStoreLocation,
+ .deleteLoadFoldersFromFileSystem(carbonLoadModel, storePath,
partitioner.partitionCount, isForceDeletion, details)
if (isUpdationRequired) {
@@ -1164,7 +1164,7 @@ object CarbonDataRDDFactory extends Logging {
def cleanFiles(
sc: SparkContext,
carbonLoadModel: CarbonLoadModel,
- hdfsStoreLocation: String,
+ storePath: String,
partitioner: Partitioner) {
val table = org.apache.carbondata.core.carbon.metadata.CarbonMetadata.getInstance
.getCarbonTable(carbonLoadModel.getDatabaseName + "_" + carbonLoadModel.getTableName)
@@ -1179,7 +1179,7 @@ object CarbonDataRDDFactory extends Logging {
deleteLoadsAndUpdateMetadata(carbonLoadModel,
table,
partitioner,
- hdfsStoreLocation,
+ storePath,
isForceDeletion = true)
}
else {
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
index f3308d6..8c52249 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
@@ -40,7 +40,7 @@ class CarbonDeleteLoadByDateRDD[K, V](
partitioner: Partitioner,
factTableName: String,
dimTableName: String,
- hdfsStoreLocation: String,
+ storePath: String,
loadMetadataDetails: List[LoadMetadataDetails])
extends RDD[(K, V)](sc, Nil) with Logging {
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
index 1f016ba..2bfd99d 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
@@ -59,7 +59,7 @@ class CarbonMergerRDD[K, V](
sc.setLocalProperty("spark.job.interruptOnCancel", "true")
var storeLocation: String = null
- val hdfsStoreLocation = carbonMergerMapping.hdfsStoreLocation
+ val storePath = carbonMergerMapping.storePath
val metadataFilePath = carbonMergerMapping.metadataFilePath
val mergedLoadName = carbonMergerMapping.mergedLoadName
val databaseName = carbonMergerMapping.databaseName
@@ -120,10 +120,10 @@ class CarbonMergerRDD[K, V](
val dataFileMetadataSegMapping: java.util.Map[String, List[DataFileFooter]] =
CarbonCompactionUtil.createDataFileFooterMappingForSegments(tableBlockInfoList)
- carbonLoadModel.setStorePath(hdfsStoreLocation)
+ carbonLoadModel.setStorePath(storePath)
exec = new CarbonCompactionExecutor(segmentMapping, segmentProperties, databaseName,
- factTableName, hdfsStoreLocation, carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable,
+ factTableName, storePath, carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable,
dataFileMetadataSegMapping
)
@@ -221,7 +221,7 @@ class CarbonMergerRDD[K, V](
val startTime = System.currentTimeMillis()
val absoluteTableIdentifier: AbsoluteTableIdentifier = new AbsoluteTableIdentifier(
- hdfsStoreLocation, new CarbonTableIdentifier(databaseName, factTableName, tableId)
+ storePath, new CarbonTableIdentifier(databaseName, factTableName, tableId)
)
val (carbonInputFormat: CarbonInputFormat[Array[Object]], job: Job) =
QueryPlanUtil.createCarbonInputFormat(absoluteTableIdentifier)
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
index 172ca16..28c37f3 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
@@ -39,7 +39,7 @@ object Compactor {
def triggerCompaction(compactionCallableModel: CompactionCallableModel): Unit = {
- val hdfsStoreLocation = compactionCallableModel.hdfsStoreLocation
+ val storePath = compactionCallableModel.storePath
val partitioner = compactionCallableModel.partitioner
val storeLocation = compactionCallableModel.storeLocation
val carbonTable = compactionCallableModel.carbonTable
@@ -55,12 +55,11 @@ object Compactor {
var finalMergeStatus = false
val schemaName: String = carbonLoadModel.getDatabaseName
val factTableName = carbonLoadModel.getTableName
- val storePath = hdfsStoreLocation
val validSegments: Array[String] = CarbonDataMergerUtil
.getValidSegments(loadsToMerge).split(',')
val mergeLoadStartTime = CarbonLoaderUtil.readCurrentTime();
val carbonMergerMapping = CarbonMergerMapping(storeLocation,
- hdfsStoreLocation,
+ storePath,
partitioner,
carbonTable.getMetaDataFilepath(),
mergedLoadName,
@@ -73,7 +72,7 @@ object Compactor {
maxSegmentColCardinality = null,
maxSegmentColumnSchemaList = null
)
- carbonLoadModel.setStorePath(carbonMergerMapping.hdfsStoreLocation)
+ carbonLoadModel.setStorePath(carbonMergerMapping.storePath)
val segmentStatusManager = new SegmentStatusManager(new AbsoluteTableIdentifier
(CarbonProperties.getInstance().getProperty(CarbonCommonConstants.STORE_LOCATION),
new CarbonTableIdentifier(carbonLoadModel.getDatabaseName,
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a65ca7cc/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 908952e..f285984 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -146,7 +146,7 @@ case class Default(key: String, value: String)
case class DataLoadTableFileMapping(table: String, loadPath: String)
case class CarbonMergerMapping(storeLocation: String,
- hdfsStoreLocation: String,
+ storePath: String,
partitioner: Partitioner,
metadataFilePath: String,
mergedLoadName: String,
@@ -173,7 +173,7 @@ case class CompactionModel(compactionSize: Long,
tableCreationTime: Long,
isDDLTrigger: Boolean)
-case class CompactionCallableModel(hdfsStoreLocation: String, carbonLoadModel: CarbonLoadModel,
+case class CompactionCallableModel(storePath: String, carbonLoadModel: CarbonLoadModel,
partitioner: Partitioner, storeLocation: String, carbonTable: CarbonTable, kettleHomePath: String,
cubeCreationTime: Long, loadsToMerge: util.List[LoadMetadataDetails], sqlContext: SQLContext,
compactionType: CompactionType)
[2/2] incubator-carbondata git commit: [CARBONDATA-383]Optimize
hdfsStoreLocation/hdfsStorePath parameters' name since Carbon not only
support hdfs path This closes #298
Posted by ch...@apache.org.
[CARBONDATA-383]Optimize hdfsStoreLocation/hdfsStorePath parameters' name since Carbon not only support hdfs path This closes #298
Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/b4c3e511
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/b4c3e511
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/b4c3e511
Branch: refs/heads/master
Commit: b4c3e51155f37d205e1beb84654ddcfec564ad0c
Parents: 97377af a65ca7c
Author: chenliang613 <ch...@apache.org>
Authored: Wed Nov 9 17:08:00 2016 +0800
Committer: chenliang613 <ch...@apache.org>
Committed: Wed Nov 9 17:08:00 2016 +0800
----------------------------------------------------------------------
.../CarbonDictionaryMetadataReaderImpl.java | 12 ++--
.../core/reader/CarbonDictionaryReaderImpl.java | 14 ++--
.../CarbonDictionarySortIndexReaderImpl.java | 2 +-
.../core/writer/CarbonDictionaryWriterImpl.java | 14 ++--
...CarbonDictionarySortIndexReaderImplTest.java | 16 ++---
...CarbonDictionarySortIndexWriterImplTest.java | 14 ++--
.../carbondata/spark/load/CarbonLoaderUtil.java | 4 +-
.../carbondata/spark/CarbonSparkFactory.scala | 2 +-
.../spark/DictionaryDetailHelper.scala | 4 +-
.../spark/rdd/CarbonDataLoadRDD.scala | 16 ++---
.../spark/rdd/CarbonDataRDDFactory.scala | 70 ++++++++++----------
.../spark/rdd/CarbonDeleteLoadByDateRDD.scala | 2 +-
.../carbondata/spark/rdd/CarbonMergerRDD.scala | 8 +--
.../apache/carbondata/spark/rdd/Compactor.scala | 7 +-
.../execution/command/carbonTableSchema.scala | 4 +-
15 files changed, 93 insertions(+), 96 deletions(-)
----------------------------------------------------------------------