You are viewing a plain text version of this content. The canonical link was part of the original HTML page and is not preserved in this text extraction.
Posted to commits@carbondata.apache.org by ra...@apache.org on 2017/05/11 13:53:25 UTC
[35/50] carbondata git commit: added support for int to long data type change
added support for int to long data type change
Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/e1e6e578
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/e1e6e578
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/e1e6e578
Branch: refs/heads/branch-1.1
Commit: e1e6e5782bd509e35b18f769bbfcfe99c0b037c4
Parents: 393cd6f
Author: kunal642 <ku...@knoldus.in>
Authored: Thu Apr 27 15:08:17 2017 +0530
Committer: chenliang613 <ch...@huawei.com>
Committed: Tue May 9 15:22:32 2017 +0800
----------------------------------------------------------------------
.../apache/carbondata/spark/util/CarbonScalaUtil.scala | 4 ++--
.../apache/spark/sql/catalyst/CarbonDDLSqlParser.scala | 6 +++---
.../rowreader/ChangeDataTypeTestCases.scala | 13 +++++++++++++
3 files changed, 18 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1e6e578/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
index 50ebdcb..fb01b27 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
@@ -151,12 +151,12 @@ object CarbonScalaUtil {
def validateColumnDataType(dataTypeInfo: DataTypeInfo, carbonColumn: CarbonColumn): Unit = {
carbonColumn.getDataType.getName match {
case "INT" =>
- if (!dataTypeInfo.dataType.equals("bigint")) {
+ if (!dataTypeInfo.dataType.equals("bigint") && !dataTypeInfo.dataType.equals("long")) {
sys
.error(s"Given column ${ carbonColumn.getColName } with data type ${
carbonColumn
.getDataType.getName
- } cannot be modified. Int can only be changed to bigInt")
+ } cannot be modified. Int can only be changed to bigInt or long")
}
case "DECIMAL" =>
if (!dataTypeInfo.dataType.equals("decimal")) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1e6e578/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index dad5716..afc4a58 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -237,8 +237,8 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
tableProperties: mutable.Map[String, String],
bucketFields: Option[BucketFields], isAlterFlow: Boolean = false): TableModel = {
- fields.zipWithIndex.foreach { x =>
- x._1.schemaOrdinal = x._2
+ fields.zipWithIndex.foreach { case (field, index) =>
+ field.schemaOrdinal = index
}
val (dims: Seq[Field], noDictionaryDims: Seq[String]) = extractDimColsAndNoDictionaryFields(
fields, tableProperties)
@@ -1049,7 +1049,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
*/
protected def parseDataType(dataType: String, values: Option[List[(Int, Int)]]): DataTypeInfo = {
dataType match {
- case "bigint" =>
+ case "bigint" | "long" =>
if (values.isDefined) {
throw new MalformedCarbonCommandException("Invalid data type")
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1e6e578/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/ChangeDataTypeTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/ChangeDataTypeTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/ChangeDataTypeTestCases.scala
index 35057b2..e3fec55 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/ChangeDataTypeTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/ChangeDataTypeTestCases.scala
@@ -105,6 +105,19 @@ class ChangeDataTypeTestCases extends QueryTest with BeforeAndAfterAll {
afterAll
}
+ test("test to change int datatype to long") {
+ beforeAll
+ sql(
+ "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
+ "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
+ s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
+ s"decimalField')")
+ sql("Alter table changedatatypetest change intField intField long")
+ checkAnswer(sql("select intField from changedatatypetest limit 1"), Row(100))
+ afterAll
+ }
+
override def afterAll {
sql("DROP TABLE IF EXISTS changedatatypetest")
sql("drop table if exists hivetable")