You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ch...@apache.org on 2016/06/30 17:42:18 UTC
[31/50] [abbrv] incubator-carbondata git commit: [BUG] Fixed Carbon Table Path while registering as External Table to Hive (#768)
[BUG] Fixed Carbon Table Path while registering as External Table to Hive (#768)
Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/3718dc2f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/3718dc2f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/3718dc2f
Branch: refs/heads/master
Commit: 3718dc2fda6b45cf46e50944d23a13aa5d945f89
Parents: 5e1a67b
Author: nareshpr <pr...@gmail.com>
Authored: Tue Jun 28 03:16:41 2016 +0530
Committer: Venkata Ramana G <g....@gmail.com>
Committed: Tue Jun 28 03:16:41 2016 +0530
----------------------------------------------------------------------
.../carbondata/core/carbon/path/CarbonTablePath.java | 2 +-
.../apache/spark/sql/CarbonDatasourceRelation.scala | 14 ++++++++------
.../sql/execution/command/carbonTableSchema.scala | 5 +++--
.../spark/sql/hive/CarbonMetastoreCatalog.scala | 2 +-
4 files changed, 13 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/3718dc2f/core/src/main/java/org/carbondata/core/carbon/path/CarbonTablePath.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/carbon/path/CarbonTablePath.java b/core/src/main/java/org/carbondata/core/carbon/path/CarbonTablePath.java
index b764458..bf2ef57 100644
--- a/core/src/main/java/org/carbondata/core/carbon/path/CarbonTablePath.java
+++ b/core/src/main/java/org/carbondata/core/carbon/path/CarbonTablePath.java
@@ -47,7 +47,7 @@ public class CarbonTablePath extends Path {
protected static final String PARTITION_PREFIX = "Part";
protected static final String CARBON_DATA_EXT = ".carbondata";
protected static final String DATA_PART_PREFIX = "part";
- private static final String INDEX_FILE_EXT = ".carbonindex";
+ protected static final String INDEX_FILE_EXT = ".carbonindex";
protected String tablePath;
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/3718dc2f/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
index f95acf4..9f534c1 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
@@ -51,12 +51,14 @@ class CarbonSource
override def createRelation(
sqlContext: SQLContext,
parameters: Map[String, String]): BaseRelation = {
- parameters.get("path") match {
- case Some(path) => CarbonDatasourceHadoopRelation(sqlContext, Array(path), parameters)
- case _ =>
- val options = new CarbonOption(parameters)
- val tableIdentifier = options.tableIdentifier.split("""\.""").toSeq
- CarbonDatasourceRelation(tableIdentifier, None)(sqlContext)
+ if (parameters.get("tablePath") != None) {
+ val options = new CarbonOption(parameters)
+ val tableIdentifier = options.tableIdentifier.split("""\.""").toSeq
+ CarbonDatasourceRelation(tableIdentifier, None)(sqlContext)
+ } else if (parameters.get("path") != None) {
+ CarbonDatasourceHadoopRelation(sqlContext, Array(parameters.get("path").get), parameters)
+ } else {
+ sys.error("Carbon table path not found")
}
}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/3718dc2f/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 8868367..c97a2fe 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -1260,11 +1260,12 @@ private[sql] case class CreateCube(cm: tableModel) extends RunnableCommand {
// Add Database to catalog and persist
val catalog = CarbonEnv.getInstance(sqlContext).carbonCatalog
// Need to fill partitioner class when we support partition
- val cubePath = catalog.createCubeFromThrift(tableInfo, dbName, tbName, null)(sqlContext)
+ val tablePath = catalog.createCubeFromThrift(tableInfo, dbName, tbName, null)(sqlContext)
try {
sqlContext.sql(
s"""CREATE TABLE $dbName.$tbName USING org.apache.spark.sql.CarbonSource""" +
- s""" OPTIONS (cubename "$dbName.$tbName", tablePath "$cubePath") """).collect
+ s""" OPTIONS (cubename "$dbName.$tbName", tablePath "$tablePath", path "$tablePath") """)
+ .collect
} catch {
case e: Exception =>
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/3718dc2f/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
index 56c6574..f1c8721 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
@@ -355,7 +355,7 @@ class CarbonMetastoreCatalog(hive: HiveContext, val storePath: String, client: C
logInfo(s"Table $tableName for Database $dbName created successfully.")
LOGGER.info("Table " + tableName + " for Database " + dbName + " created successfully.")
updateSchemasUpdatedTime(dbName, tableName)
- schemaMetadataPath
+ carbonTablePath.getPath
}
private def updateMetadataByWrapperTable(